/* Common subexpression elimination library for GNU compiler.
   Copyright (C) 1987-2015 Free Software Foundation, Inc.

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify it under
the terms of the GNU General Public License as published by the Free
Software Foundation; either version 3, or (at your option) any later
version.

GCC is distributed in the hope that it will be useful, but WITHOUT ANY
WARRANTY; without even the implied warranty of MERCHANTABILITY or
FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING3.  If not see
<http://www.gnu.org/licenses/>.  */

#include "config.h"
#include "system.h"
#include "coretypes.h"
#include "tm.h"
#include "rtl.h"
#include "hash-set.h"
#include "machmode.h"
#include "vec.h"
#include "double-int.h"
#include "input.h"
#include "alias.h"
#include "symtab.h"
#include "wide-int.h"
#include "inchash.h"
#include "tree.h" /* FIXME: For hashing DEBUG_EXPR & friends.  */
#include "tm_p.h"
#include "regs.h"
#include "hard-reg-set.h"
#include "flags.h"
#include "insn-config.h"
#include "recog.h"
#include "hashtab.h"
#include "function.h"
#include "emit-rtl.h"
#include "diagnostic-core.h"
#include "ggc.h"
#include "hash-table.h"
#include "dumpfile.h"
#include "cselib.h"
#include "predict.h"
#include "basic-block.h"
#include "valtrack.h"
#include "params.h"
#include "alloc-pool.h"
#include "target.h"
#include "bitmap.h"

/* A list of cselib_val structures.  */
struct elt_list {
    struct elt_list *next;
    cselib_val *elt;
};

static bool cselib_record_memory;
static bool cselib_preserve_constants;
static bool cselib_any_perm_equivs;
static inline void promote_debug_loc (struct elt_loc_list *l);
static struct elt_list *new_elt_list (struct elt_list *, cselib_val *);
static void new_elt_loc_list (cselib_val *, rtx);
static void unchain_one_value (cselib_val *);
static void unchain_one_elt_list (struct elt_list **);
static void unchain_one_elt_loc_list (struct elt_loc_list **);
static void remove_useless_values (void);
static int rtx_equal_for_cselib_1 (rtx, rtx, machine_mode);
static unsigned int cselib_hash_rtx (rtx, int, machine_mode);
static cselib_val *new_cselib_val (unsigned int, machine_mode, rtx);
static void add_mem_for_addr (cselib_val *, cselib_val *, rtx);
static cselib_val *cselib_lookup_mem (rtx, int);
static void cselib_invalidate_regno (unsigned int, machine_mode);
static void cselib_invalidate_mem (rtx);
static void cselib_record_set (rtx, cselib_val *, cselib_val *);
static void cselib_record_sets (rtx_insn *);

struct expand_value_data
{
  bitmap regs_active;
  cselib_expand_callback callback;
  void *callback_arg;
  bool dummy;
};

static rtx cselib_expand_value_rtx_1 (rtx, struct expand_value_data *, int);

/* There are three ways in which cselib can look up an rtx:
   - for a REG, the reg_values table (which is indexed by regno) is used
   - for a MEM, we recursively look up its address and then follow the
     addr_list of that value
   - for everything else, we compute a hash value and go through the hash
     table.  Since different rtx's can still have the same hash value,
     this involves walking the table entries for a given value and comparing
     the locations of the entries with the rtx we are looking up.  */
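
/* A minimal sketch of how a client exercises these three paths; the
   wrapper function is hypothetical and not part of cselib's interface
   (cselib must be active, i.e. between cselib_init and the matching
   cselib_finish):

     static cselib_val *
     sketch_lookup (rtx x)
     {
       // REGs go through REG_VALUES, MEMs through the addr_list of
       // their address's value, everything else through the hash table.
       return cselib_lookup (x, GET_MODE (x), 0, VOIDmode);
     }
*/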

struct cselib_hasher : typed_noop_remove <cselib_val>
{
  typedef cselib_val value_type;
  struct compare_type {
    /* The rtx value and its mode (needed separately for constant
       integers).  */
    machine_mode mode;
    rtx x;
    /* The mode of the containing MEM, if any, otherwise VOIDmode.  */
    machine_mode memmode;
  };
  static inline hashval_t hash (const value_type *);
  static inline bool equal (const value_type *, const compare_type *);
};

/* The hash function for our hash table.  The value is always computed with
   cselib_hash_rtx when adding an element; this function just extracts the
   hash value from a cselib_val structure.  */

inline hashval_t
cselib_hasher::hash (const value_type *v)
{
  return v->hash;
}

/* The equality test for our hash table.  The first argument V is a table
   element (i.e. a cselib_val), while the second arg X is an rtx.  We know
   that all callers of htab_find_slot_with_hash will wrap CONST_INTs into a
   CONST of an appropriate mode.  */

inline bool
cselib_hasher::equal (const value_type *v, const compare_type *x_arg)
{
  struct elt_loc_list *l;
  rtx x = x_arg->x;
  machine_mode mode = x_arg->mode;
  machine_mode memmode = x_arg->memmode;

  if (mode != GET_MODE (v->val_rtx))
    return false;

  if (GET_CODE (x) == VALUE)
    return x == v->val_rtx;

  /* We don't guarantee that distinct rtx's have different hash values,
     so we need to do a comparison.  */
  for (l = v->locs; l; l = l->next)
    if (rtx_equal_for_cselib_1 (l->loc, x, memmode))
      {
	promote_debug_loc (l);
	return true;
      }

  return false;
}

/* A table that enables us to look up elts by their value.  */
static hash_table<cselib_hasher> *cselib_hash_table;

/* A table to hold preserved values.  */
static hash_table<cselib_hasher> *cselib_preserved_hash_table;

/* This is a global so we don't have to pass it through every function.
   It is used in new_elt_loc_list to set SETTING_INSN.  */
static rtx_insn *cselib_current_insn;

/* The unique id that the next created value will take.  */
static unsigned int next_uid;

/* The number of registers we had when the varrays were last resized.  */
static unsigned int cselib_nregs;

/* Count values without known locations, or with only locations that
   wouldn't have been known except for debug insns.  Whenever this
   grows too big, we remove these useless values from the table.

   Counting values with only debug locations is a bit tricky.  We don't
   want to increment n_useless_values when we create a value for a
   debug insn, for this would get n_useless_values out of sync, but we
   do want to increment it if all locs in the list that were ever
   referenced in nondebug insns are removed from the list.

   In the general case, once we do that, we'd have to stop accepting
   nondebug expressions in the loc list, to avoid having two values
   equivalent that, without debug insns, would have been made into
   separate values.  However, because debug insns never introduce
   equivalences themselves (no assignments), the only means for
   growing loc lists is through nondebug assignments.  If the locs
   also happen to be referenced in debug insns, it will work just fine.

   A consequence of this is that there's at most one debug-only loc in
   each loc list.  If we keep it in the first entry, testing whether
   we have a debug-only loc list takes O(1).

   Furthermore, since any additional entry in a loc list containing a
   debug loc would have to come from an assignment (nondebug) that
   references both the initial debug loc and the newly-equivalent loc,
   the initial debug loc would be promoted to a nondebug loc, and the
   loc list would not contain debug locs any more.

   So the only case we have to be careful with in order to keep
   n_useless_values in sync between debug and nondebug compilations is
   to avoid incrementing n_useless_values when removing the single loc
   from a value that turns out to not appear outside debug values.  We
   increment n_useless_debug_values instead, and leave such values
   alone until, for other reasons, we garbage-collect useless
   values.  */
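
/* Given the invariant above (a debug-only loc, if any, is kept in the
   first entry), a minimal sketch of the resulting O(1) test, written
   here for illustration only and not part of cselib's interface:

     static inline bool
     debug_only_locs_p (cselib_val *v)
     {
       return (v->locs && v->locs->setting_insn
	       && DEBUG_INSN_P (v->locs->setting_insn));
     }
*/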
static int n_useless_values;
static int n_useless_debug_values;

/* Count values whose locs have been taken exclusively from debug
   insns for the entire life of the value.  */
static int n_debug_values;

/* Number of useless values before we remove them from the hash table.  */
#define MAX_USELESS_VALUES 32

/* This table maps from register number to values.  It does not
   contain pointers to cselib_val structures, but rather elt_lists.
   The purpose is to be able to refer to the same register in
   different modes.  The first element of the list defines the mode in
   which the register was set; if the mode is unknown or the value is
   no longer valid in that mode, ELT will be NULL for the first
   element.  */
static struct elt_list **reg_values;
static unsigned int reg_values_size;
#define REG_VALUES(i) reg_values[i]
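
/* For illustration, finding the value of register REGNO in mode MODE
   walks this list, skipping the leading entry whose NULL elt marks an
   unknown or invalidated first mode; the same walk appears in
   cselib_expand_value_rtx_1 and cselib_subst_to_values below:

     struct elt_list *l = REG_VALUES (regno);
     if (l && l->elt == NULL)
       l = l->next;
     for (; l; l = l->next)
       if (GET_MODE (l->elt->val_rtx) == mode)
	 return l->elt;
*/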

/* The largest number of hard regs used by any entry added to the
   REG_VALUES table.  Cleared on each cselib_clear_table() invocation.  */
static unsigned int max_value_regs;

/* The set of indices I for which REG_VALUES (I) != 0 is saved here.
   This is used in cselib_clear_table() for fast emptying.  */
static unsigned int *used_regs;
static unsigned int n_used_regs;

/* We pass this to cselib_invalidate_mem to invalidate all of
   memory for a non-const call instruction.  */
static GTY(()) rtx callmem;

/* Set by discard_useless_locs if it deleted the last location of any
   value.  */
static int values_became_useless;

/* Used as stop element of the containing_mem list so we can check
   presence in the list by checking the next pointer.  */
static cselib_val dummy_val;

/* If non-NULL, value of the eliminated arg_pointer_rtx or frame_pointer_rtx
   that is constant through the whole function and should never be
   eliminated.  */
static cselib_val *cfa_base_preserved_val;
static unsigned int cfa_base_preserved_regno = INVALID_REGNUM;

/* Used to list all values that contain a memory reference.
   May or may not contain the useless values - the list is compacted
   each time memory is invalidated.  */
static cselib_val *first_containing_mem = &dummy_val;
static alloc_pool elt_loc_list_pool, elt_list_pool, cselib_val_pool, value_pool;

/* If nonnull, cselib will call this function before freeing useless
   VALUEs.  A VALUE is deemed useless if its "locs" field is null.  */
void (*cselib_discard_hook) (cselib_val *);

/* If nonnull, cselib will call this function before recording sets or
   even clobbering outputs of INSN.  All the recorded sets will be
   represented in the array sets[n_sets].  new_val_min can be used to
   tell whether values present in sets are introduced by this
   instruction.  */
void (*cselib_record_sets_hook) (rtx_insn *insn, struct cselib_set *sets,
				 int n_sets);

#define PRESERVED_VALUE_P(RTX) \
  (RTL_FLAG_CHECK1 ("PRESERVED_VALUE_P", (RTX), VALUE)->unchanging)

#define SP_BASED_VALUE_P(RTX) \
  (RTL_FLAG_CHECK1 ("SP_BASED_VALUE_P", (RTX), VALUE)->jump)



/* Allocate a struct elt_list and fill in its two elements with the
   arguments.  */

static inline struct elt_list *
new_elt_list (struct elt_list *next, cselib_val *elt)
{
  struct elt_list *el;
  el = (struct elt_list *) pool_alloc (elt_list_pool);
  el->next = next;
  el->elt = elt;
  return el;
}

/* Allocate a struct elt_loc_list with LOC and prepend it to VAL's loc
   list.  */

static inline void
new_elt_loc_list (cselib_val *val, rtx loc)
{
  struct elt_loc_list *el, *next = val->locs;

  gcc_checking_assert (!next || !next->setting_insn
		       || !DEBUG_INSN_P (next->setting_insn)
		       || cselib_current_insn == next->setting_insn);

  /* If we're creating the first loc in a debug insn context, we've
     just created a debug value.  Count it.  */
  if (!next && cselib_current_insn && DEBUG_INSN_P (cselib_current_insn))
    n_debug_values++;

  val = canonical_cselib_val (val);
  next = val->locs;

  if (GET_CODE (loc) == VALUE)
    {
      loc = canonical_cselib_val (CSELIB_VAL_PTR (loc))->val_rtx;

      gcc_checking_assert (PRESERVED_VALUE_P (loc)
			   == PRESERVED_VALUE_P (val->val_rtx));

      if (val->val_rtx == loc)
	return;
      else if (val->uid > CSELIB_VAL_PTR (loc)->uid)
	{
	  /* Reverse the insertion.  */
	  new_elt_loc_list (CSELIB_VAL_PTR (loc), val->val_rtx);
	  return;
	}

      gcc_checking_assert (val->uid < CSELIB_VAL_PTR (loc)->uid);

      if (CSELIB_VAL_PTR (loc)->locs)
	{
	  /* Bring all locs from LOC to VAL.  */
	  for (el = CSELIB_VAL_PTR (loc)->locs; el->next; el = el->next)
	    {
	      /* Adjust values that have LOC as canonical so that VAL
		 becomes their canonical.  */
	      if (el->loc && GET_CODE (el->loc) == VALUE)
		{
		  gcc_checking_assert (CSELIB_VAL_PTR (el->loc)->locs->loc
				       == loc);
		  CSELIB_VAL_PTR (el->loc)->locs->loc = val->val_rtx;
		}
	    }
	  el->next = val->locs;
	  next = val->locs = CSELIB_VAL_PTR (loc)->locs;
	}

      if (CSELIB_VAL_PTR (loc)->addr_list)
	{
	  /* Bring LOC's addr_list into the canonical node.  */
	  struct elt_list *last = CSELIB_VAL_PTR (loc)->addr_list;
	  while (last->next)
	    last = last->next;
	  last->next = val->addr_list;
	  val->addr_list = CSELIB_VAL_PTR (loc)->addr_list;
	  CSELIB_VAL_PTR (loc)->addr_list = NULL;
	}

      if (CSELIB_VAL_PTR (loc)->next_containing_mem != NULL
	  && val->next_containing_mem == NULL)
	{
	  /* Add VAL to the containing_mem list after LOC.  LOC will
	     be removed when we notice it doesn't contain any
	     MEMs.  */
	  val->next_containing_mem = CSELIB_VAL_PTR (loc)->next_containing_mem;
	  CSELIB_VAL_PTR (loc)->next_containing_mem = val;
	}

      /* Chain LOC back to VAL.  */
      el = (struct elt_loc_list *) pool_alloc (elt_loc_list_pool);
      el->loc = val->val_rtx;
      el->setting_insn = cselib_current_insn;
      el->next = NULL;
      CSELIB_VAL_PTR (loc)->locs = el;
    }

  el = (struct elt_loc_list *) pool_alloc (elt_loc_list_pool);
  el->loc = loc;
  el->setting_insn = cselib_current_insn;
  el->next = next;
  val->locs = el;
}
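
/* For example, if new_elt_loc_list is asked to record that VALUEs V1
   (uid 1) and V2 (uid 2) are equivalent, the insertion is reordered so
   that the older V1 becomes canonical: V2's locs and addr_list are
   spliced into V1's, and V2 is left with a single loc pointing back
   at V1.  */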

/* Promote loc L to a nondebug cselib_current_insn if L is marked as
   originating from a debug insn, maintaining the debug values
   count.  */

static inline void
promote_debug_loc (struct elt_loc_list *l)
{
  if (l && l->setting_insn && DEBUG_INSN_P (l->setting_insn)
      && (!cselib_current_insn || !DEBUG_INSN_P (cselib_current_insn)))
    {
      n_debug_values--;
      l->setting_insn = cselib_current_insn;
      if (cselib_preserve_constants && l->next)
	{
	  gcc_assert (l->next->setting_insn
		      && DEBUG_INSN_P (l->next->setting_insn)
		      && !l->next->next);
	  l->next->setting_insn = cselib_current_insn;
	}
      else
	gcc_assert (!l->next);
    }
}

/* The elt_list at *PL is no longer needed.  Unchain it and free its
   storage.  */

static inline void
unchain_one_elt_list (struct elt_list **pl)
{
  struct elt_list *l = *pl;

  *pl = l->next;
  pool_free (elt_list_pool, l);
}

/* Likewise for elt_loc_lists.  */

static void
unchain_one_elt_loc_list (struct elt_loc_list **pl)
{
  struct elt_loc_list *l = *pl;

  *pl = l->next;
  pool_free (elt_loc_list_pool, l);
}

/* Likewise for cselib_vals.  This also frees the addr_list associated with
   V.  */

static void
unchain_one_value (cselib_val *v)
{
  while (v->addr_list)
    unchain_one_elt_list (&v->addr_list);

  pool_free (cselib_val_pool, v);
}

/* Remove all entries from the hash table.  Also used during
   initialization.  */

void
cselib_clear_table (void)
{
  cselib_reset_table (1);
}

/* Return TRUE if V is a constant, a function invariant or a VALUE
   equivalence; FALSE otherwise.  */

static bool
invariant_or_equiv_p (cselib_val *v)
{
  struct elt_loc_list *l;

  if (v == cfa_base_preserved_val)
    return true;

  /* Keep VALUE equivalences around.  */
  for (l = v->locs; l; l = l->next)
    if (GET_CODE (l->loc) == VALUE)
      return true;

  if (v->locs != NULL
      && v->locs->next == NULL)
    {
      if (CONSTANT_P (v->locs->loc)
	  && (GET_CODE (v->locs->loc) != CONST
	      || !references_value_p (v->locs->loc, 0)))
	return true;
      /* Although a debug expr may be bound to different expressions,
	 we can preserve it as if it were constant, to get unification
	 and proper merging within var-tracking.  */
      if (GET_CODE (v->locs->loc) == DEBUG_EXPR
	  || GET_CODE (v->locs->loc) == DEBUG_IMPLICIT_PTR
	  || GET_CODE (v->locs->loc) == ENTRY_VALUE
	  || GET_CODE (v->locs->loc) == DEBUG_PARAMETER_REF)
	return true;

      /* (plus (value V) (const_int C)) is invariant iff V is invariant.  */
      if (GET_CODE (v->locs->loc) == PLUS
	  && CONST_INT_P (XEXP (v->locs->loc, 1))
	  && GET_CODE (XEXP (v->locs->loc, 0)) == VALUE
	  && invariant_or_equiv_p (CSELIB_VAL_PTR (XEXP (v->locs->loc, 0))))
	return true;
    }

  return false;
}

/* Remove from hash table all VALUEs except constants, function
   invariants and VALUE equivalences.  */

int
preserve_constants_and_equivs (cselib_val **x, void *info ATTRIBUTE_UNUSED)
{
  cselib_val *v = *x;

  if (invariant_or_equiv_p (v))
    {
      cselib_hasher::compare_type lookup = {
	GET_MODE (v->val_rtx), v->val_rtx, VOIDmode
      };
      cselib_val **slot
	= cselib_preserved_hash_table->find_slot_with_hash (&lookup,
							    v->hash, INSERT);
      gcc_assert (!*slot);
      *slot = v;
    }

  cselib_hash_table->clear_slot (x);

  return 1;
}

/* Remove all entries from the hash table, arranging for the next
   value to be numbered NUM.  */

void
cselib_reset_table (unsigned int num)
{
  unsigned int i;

  max_value_regs = 0;

  if (cfa_base_preserved_val)
    {
      unsigned int regno = cfa_base_preserved_regno;
      unsigned int new_used_regs = 0;
      for (i = 0; i < n_used_regs; i++)
	if (used_regs[i] == regno)
	  {
	    new_used_regs = 1;
	    continue;
	  }
	else
	  REG_VALUES (used_regs[i]) = 0;
      gcc_assert (new_used_regs == 1);
      n_used_regs = new_used_regs;
      used_regs[0] = regno;
      max_value_regs
	= hard_regno_nregs[regno][GET_MODE (cfa_base_preserved_val->locs->loc)];
    }
  else
    {
      for (i = 0; i < n_used_regs; i++)
	REG_VALUES (used_regs[i]) = 0;
      n_used_regs = 0;
    }

  if (cselib_preserve_constants)
    cselib_hash_table->traverse <void *, preserve_constants_and_equivs>
      (NULL);
  else
    {
      cselib_hash_table->empty ();
      gcc_checking_assert (!cselib_any_perm_equivs);
    }

  n_useless_values = 0;
  n_useless_debug_values = 0;
  n_debug_values = 0;

  next_uid = num;

  first_containing_mem = &dummy_val;
}

/* Return the number of the next value that will be generated.  */

unsigned int
cselib_get_next_uid (void)
{
  return next_uid;
}

/* Search for X, whose hashcode is HASH, in CSELIB_HASH_TABLE,
   INSERTing if requested.  When X is part of the address of a MEM,
   MEMMODE should specify the mode of the MEM.  */

static cselib_val **
cselib_find_slot (machine_mode mode, rtx x, hashval_t hash,
		  enum insert_option insert, machine_mode memmode)
{
  cselib_val **slot = NULL;
  cselib_hasher::compare_type lookup = { mode, x, memmode };
  if (cselib_preserve_constants)
    slot = cselib_preserved_hash_table->find_slot_with_hash (&lookup, hash,
							     NO_INSERT);
  if (!slot)
    slot = cselib_hash_table->find_slot_with_hash (&lookup, hash, insert);
  return slot;
}

/* Return true if X contains a VALUE rtx.  If ONLY_USELESS is set, we
   only return true for values which point to a cselib_val whose value
   element has been set to zero, which implies the cselib_val will be
   removed.  */

int
references_value_p (const_rtx x, int only_useless)
{
  const enum rtx_code code = GET_CODE (x);
  const char *fmt = GET_RTX_FORMAT (code);
  int i, j;

  if (GET_CODE (x) == VALUE
      && (! only_useless ||
	  (CSELIB_VAL_PTR (x)->locs == 0 && !PRESERVED_VALUE_P (x))))
    return 1;

  for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
    {
      if (fmt[i] == 'e' && references_value_p (XEXP (x, i), only_useless))
	return 1;
      else if (fmt[i] == 'E')
	for (j = 0; j < XVECLEN (x, i); j++)
	  if (references_value_p (XVECEXP (x, i, j), only_useless))
	    return 1;
    }

  return 0;
}

/* For all locations found in X, delete locations that reference useless
   values (i.e. values without any location).  Called through
   htab_traverse.  */

int
discard_useless_locs (cselib_val **x, void *info ATTRIBUTE_UNUSED)
{
  cselib_val *v = *x;
  struct elt_loc_list **p = &v->locs;
  bool had_locs = v->locs != NULL;
  rtx setting_insn = v->locs ? v->locs->setting_insn : NULL;

  while (*p)
    {
      if (references_value_p ((*p)->loc, 1))
	unchain_one_elt_loc_list (p);
      else
	p = &(*p)->next;
    }

  if (had_locs && v->locs == 0 && !PRESERVED_VALUE_P (v->val_rtx))
    {
      if (setting_insn && DEBUG_INSN_P (setting_insn))
	n_useless_debug_values++;
      else
	n_useless_values++;
      values_became_useless = 1;
    }
  return 1;
}

/* If X is a value with no locations, remove it from the hashtable.  */

int
discard_useless_values (cselib_val **x, void *info ATTRIBUTE_UNUSED)
{
  cselib_val *v = *x;

  if (v->locs == 0 && !PRESERVED_VALUE_P (v->val_rtx))
    {
      if (cselib_discard_hook)
	cselib_discard_hook (v);

      CSELIB_VAL_PTR (v->val_rtx) = NULL;
      cselib_hash_table->clear_slot (x);
      unchain_one_value (v);
      n_useless_values--;
    }

  return 1;
}

/* Clean out useless values (i.e. those which no longer have locations
   associated with them) from the hash table.  */

static void
remove_useless_values (void)
{
  cselib_val **p, *v;

  /* First pass: eliminate locations that reference useless values.
     That in turn can make more values useless.  */
  do
    {
      values_became_useless = 0;
      cselib_hash_table->traverse <void *, discard_useless_locs> (NULL);
    }
  while (values_became_useless);

  /* Second pass: actually remove the values.  */

  p = &first_containing_mem;
  for (v = *p; v != &dummy_val; v = v->next_containing_mem)
    if (v->locs && v == canonical_cselib_val (v))
      {
	*p = v;
	p = &(*p)->next_containing_mem;
      }
  *p = &dummy_val;

  n_useless_values += n_useless_debug_values;
  n_debug_values -= n_useless_debug_values;
  n_useless_debug_values = 0;

  cselib_hash_table->traverse <void *, discard_useless_values> (NULL);

  gcc_assert (!n_useless_values);
}

/* Arrange for a value to not be removed from the hash table even if
   it becomes useless.  */

void
cselib_preserve_value (cselib_val *v)
{
  PRESERVED_VALUE_P (v->val_rtx) = 1;
}

/* Test whether a value is preserved.  */

bool
cselib_preserved_value_p (cselib_val *v)
{
  return PRESERVED_VALUE_P (v->val_rtx);
}

/* Arrange for a REG value to be assumed constant through the whole function,
   never invalidated and preserved across cselib_reset_table calls.  */

void
cselib_preserve_cfa_base_value (cselib_val *v, unsigned int regno)
{
  if (cselib_preserve_constants
      && v->locs
      && REG_P (v->locs->loc))
    {
      cfa_base_preserved_val = v;
      cfa_base_preserved_regno = regno;
    }
}

/* Clean all non-constant expressions in the hash table, but retain
   their values.  */

void
cselib_preserve_only_values (void)
{
  int i;

  for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
    cselib_invalidate_regno (i, reg_raw_mode[i]);

  cselib_invalidate_mem (callmem);

  remove_useless_values ();

  gcc_assert (first_containing_mem == &dummy_val);
}

/* Arrange for a value to be marked as based on stack pointer
   for find_base_term purposes.  */

void
cselib_set_value_sp_based (cselib_val *v)
{
  SP_BASED_VALUE_P (v->val_rtx) = 1;
}

/* Test whether a value is based on stack pointer for
   find_base_term purposes.  */

bool
cselib_sp_based_value_p (cselib_val *v)
{
  return SP_BASED_VALUE_P (v->val_rtx);
}

/* Return the mode in which a register was last set.  If X is not a
   register, return its mode.  If the mode in which the register was
   set is not known, or the value was already clobbered, return
   VOIDmode.  */

machine_mode
cselib_reg_set_mode (const_rtx x)
{
  if (!REG_P (x))
    return GET_MODE (x);

  if (REG_VALUES (REGNO (x)) == NULL
      || REG_VALUES (REGNO (x))->elt == NULL)
    return VOIDmode;

  return GET_MODE (REG_VALUES (REGNO (x))->elt->val_rtx);
}

/* Return nonzero if we can prove that X and Y contain the same value, taking
   our gathered information into account.  */

int
rtx_equal_for_cselib_p (rtx x, rtx y)
{
  return rtx_equal_for_cselib_1 (x, y, VOIDmode);
}

/* If X is a PLUS or an autoinc operation, expand the operation,
   storing the offset, if any, in *OFF.  */

static rtx
autoinc_split (rtx x, rtx *off, machine_mode memmode)
{
  switch (GET_CODE (x))
    {
    case PLUS:
      *off = XEXP (x, 1);
      return XEXP (x, 0);

    case PRE_DEC:
      if (memmode == VOIDmode)
	return x;

      *off = GEN_INT (-GET_MODE_SIZE (memmode));
      return XEXP (x, 0);

    case PRE_INC:
      if (memmode == VOIDmode)
	return x;

      *off = GEN_INT (GET_MODE_SIZE (memmode));
      return XEXP (x, 0);

    case PRE_MODIFY:
      return XEXP (x, 1);

    case POST_DEC:
    case POST_INC:
    case POST_MODIFY:
      return XEXP (x, 0);

    default:
      return x;
    }
}
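
/* For example, with MEMMODE == SImode (size 4):

     (pre_dec (reg R))            -> base (reg R), *OFF = (const_int -4)
     (pre_inc (reg R))            -> base (reg R), *OFF = (const_int 4)
     (plus (reg R) (const_int C)) -> base (reg R), *OFF = (const_int C)

   which lets rtx_equal_for_cselib_1 below compare
   (mem:SI (pre_dec (reg R))) against (mem:SI (plus (reg R) (const_int -4)))
   base against base and offset against offset.  */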

/* Return nonzero if we can prove that X and Y contain the same value,
   taking our gathered information into account.  MEMMODE holds the
   mode of the enclosing MEM, if any, as required to deal with autoinc
   addressing modes.  If X and Y are not (known to be) part of
   addresses, MEMMODE should be VOIDmode.  */

static int
rtx_equal_for_cselib_1 (rtx x, rtx y, machine_mode memmode)
{
  enum rtx_code code;
  const char *fmt;
  int i;

  if (REG_P (x) || MEM_P (x))
    {
      cselib_val *e = cselib_lookup (x, GET_MODE (x), 0, memmode);

      if (e)
	x = e->val_rtx;
    }

  if (REG_P (y) || MEM_P (y))
    {
      cselib_val *e = cselib_lookup (y, GET_MODE (y), 0, memmode);

      if (e)
	y = e->val_rtx;
    }

  if (x == y)
    return 1;

  if (GET_CODE (x) == VALUE)
    {
      cselib_val *e = canonical_cselib_val (CSELIB_VAL_PTR (x));
      struct elt_loc_list *l;

      if (GET_CODE (y) == VALUE)
	return e == canonical_cselib_val (CSELIB_VAL_PTR (y));

      for (l = e->locs; l; l = l->next)
	{
	  rtx t = l->loc;

	  /* Avoid infinite recursion.  We know we have the canonical
	     value, so we can just skip any values in the equivalence
	     list.  */
	  if (REG_P (t) || MEM_P (t) || GET_CODE (t) == VALUE)
	    continue;
	  else if (rtx_equal_for_cselib_1 (t, y, memmode))
	    return 1;
	}

      return 0;
    }
  else if (GET_CODE (y) == VALUE)
    {
      cselib_val *e = canonical_cselib_val (CSELIB_VAL_PTR (y));
      struct elt_loc_list *l;

      for (l = e->locs; l; l = l->next)
	{
	  rtx t = l->loc;

	  if (REG_P (t) || MEM_P (t) || GET_CODE (t) == VALUE)
	    continue;
	  else if (rtx_equal_for_cselib_1 (x, t, memmode))
	    return 1;
	}

      return 0;
    }

  if (GET_MODE (x) != GET_MODE (y))
    return 0;

  if (GET_CODE (x) != GET_CODE (y))
    {
      rtx xorig = x, yorig = y;
      rtx xoff = NULL, yoff = NULL;

      x = autoinc_split (x, &xoff, memmode);
      y = autoinc_split (y, &yoff, memmode);

      if (!xoff != !yoff)
	return 0;

      if (xoff && !rtx_equal_for_cselib_1 (xoff, yoff, memmode))
	return 0;

      /* Don't recurse if nothing changed.  */
      if (x != xorig || y != yorig)
	return rtx_equal_for_cselib_1 (x, y, memmode);

      return 0;
    }

  /* These won't be handled correctly by the code below.  */
  switch (GET_CODE (x))
    {
    CASE_CONST_UNIQUE:
    case DEBUG_EXPR:
      return 0;

    case DEBUG_IMPLICIT_PTR:
      return DEBUG_IMPLICIT_PTR_DECL (x)
	     == DEBUG_IMPLICIT_PTR_DECL (y);

    case DEBUG_PARAMETER_REF:
      return DEBUG_PARAMETER_REF_DECL (x)
	     == DEBUG_PARAMETER_REF_DECL (y);

    case ENTRY_VALUE:
      /* ENTRY_VALUEs are function invariant, it is thus undesirable to
	 use rtx_equal_for_cselib_1 to compare the operands.  */
      return rtx_equal_p (ENTRY_VALUE_EXP (x), ENTRY_VALUE_EXP (y));

    case LABEL_REF:
      return LABEL_REF_LABEL (x) == LABEL_REF_LABEL (y);

    case MEM:
      /* We have to compare any autoinc operations in the addresses
	 using this MEM's mode.  */
      return rtx_equal_for_cselib_1 (XEXP (x, 0), XEXP (y, 0), GET_MODE (x));

    default:
      break;
    }

  code = GET_CODE (x);
  fmt = GET_RTX_FORMAT (code);

  for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
    {
      int j;

      switch (fmt[i])
	{
	case 'w':
	  if (XWINT (x, i) != XWINT (y, i))
	    return 0;
	  break;

	case 'n':
	case 'i':
	  if (XINT (x, i) != XINT (y, i))
	    return 0;
	  break;

	case 'V':
	case 'E':
	  /* Two vectors must have the same length.  */
	  if (XVECLEN (x, i) != XVECLEN (y, i))
	    return 0;

	  /* And the corresponding elements must match.  */
	  for (j = 0; j < XVECLEN (x, i); j++)
	    if (! rtx_equal_for_cselib_1 (XVECEXP (x, i, j),
					  XVECEXP (y, i, j), memmode))
	      return 0;
	  break;

	case 'e':
	  if (i == 1
	      && targetm.commutative_p (x, UNKNOWN)
	      && rtx_equal_for_cselib_1 (XEXP (x, 1), XEXP (y, 0), memmode)
	      && rtx_equal_for_cselib_1 (XEXP (x, 0), XEXP (y, 1), memmode))
	    return 1;
	  if (! rtx_equal_for_cselib_1 (XEXP (x, i), XEXP (y, i), memmode))
	    return 0;
	  break;

	case 'S':
	case 's':
	  if (strcmp (XSTR (x, i), XSTR (y, i)))
	    return 0;
	  break;

	case 'u':
	  /* These are just backpointers, so they don't matter.  */
	  break;

	case '0':
	case 't':
	  break;

	  /* It is believed that rtx's at this level will never
	     contain anything but integers and other rtx's,
	     except for within LABEL_REFs and SYMBOL_REFs.  */
	default:
	  gcc_unreachable ();
	}
    }
  return 1;
}

/* Hash an rtx.  Return 0 if we couldn't hash the rtx.
   For registers and memory locations, we look up their cselib_val structure
   and return its VALUE element.
   Possible reasons for returning 0 are: the object is volatile, or we
   couldn't find a register or memory location in the table and CREATE is
   zero.  If CREATE is nonzero, table elts are created for regs and mem.
   N.B. this hash function returns the same hash value for RTXes that
   differ only in the order of operands, thus it is suitable for comparisons
   that take commutativity into account.
   If we wanted to also support associative rules, we'd have to use a different
   strategy to avoid returning spurious 0, e.g. return ~(~0U >> 1) .
   MEMMODE indicates the mode of an enclosing MEM, and it's only
   used to compute autoinc values.
   We used to have a MODE argument for hashing for CONST_INTs, but that
   didn't make sense, since it caused spurious hash differences between
    (set (reg:SI 1) (const_int))
    (plus:SI (reg:SI 2) (reg:SI 1))
   and
    (plus:SI (reg:SI 2) (const_int))
   If the mode is important in any context, it must be checked specifically
   in a comparison anyway, since relying on hash differences is unsafe.  */

static unsigned int
cselib_hash_rtx (rtx x, int create, machine_mode memmode)
{
  cselib_val *e;
  int i, j;
  enum rtx_code code;
  const char *fmt;
  unsigned int hash = 0;

  code = GET_CODE (x);
  hash += (unsigned) code + (unsigned) GET_MODE (x);

  switch (code)
    {
    case VALUE:
      e = CSELIB_VAL_PTR (x);
      return e->hash;

    case MEM:
    case REG:
      e = cselib_lookup (x, GET_MODE (x), create, memmode);
      if (! e)
	return 0;

      return e->hash;

    case DEBUG_EXPR:
      hash += ((unsigned) DEBUG_EXPR << 7)
	      + DEBUG_TEMP_UID (DEBUG_EXPR_TREE_DECL (x));
      return hash ? hash : (unsigned int) DEBUG_EXPR;

    case DEBUG_IMPLICIT_PTR:
      hash += ((unsigned) DEBUG_IMPLICIT_PTR << 7)
	      + DECL_UID (DEBUG_IMPLICIT_PTR_DECL (x));
      return hash ? hash : (unsigned int) DEBUG_IMPLICIT_PTR;

    case DEBUG_PARAMETER_REF:
      hash += ((unsigned) DEBUG_PARAMETER_REF << 7)
	      + DECL_UID (DEBUG_PARAMETER_REF_DECL (x));
      return hash ? hash : (unsigned int) DEBUG_PARAMETER_REF;

    case ENTRY_VALUE:
      /* ENTRY_VALUEs are function invariant, thus try to avoid
	 recursing on argument if ENTRY_VALUE is one of the
	 forms emitted by expand_debug_expr, otherwise
	 ENTRY_VALUE hash would depend on the current value
	 in some register or memory.  */
      if (REG_P (ENTRY_VALUE_EXP (x)))
	hash += (unsigned int) REG
		+ (unsigned int) GET_MODE (ENTRY_VALUE_EXP (x))
		+ (unsigned int) REGNO (ENTRY_VALUE_EXP (x));
      else if (MEM_P (ENTRY_VALUE_EXP (x))
	       && REG_P (XEXP (ENTRY_VALUE_EXP (x), 0)))
	hash += (unsigned int) MEM
		+ (unsigned int) GET_MODE (XEXP (ENTRY_VALUE_EXP (x), 0))
		+ (unsigned int) REGNO (XEXP (ENTRY_VALUE_EXP (x), 0));
      else
	hash += cselib_hash_rtx (ENTRY_VALUE_EXP (x), create, memmode);
      return hash ? hash : (unsigned int) ENTRY_VALUE;

    case CONST_INT:
      hash += ((unsigned) CONST_INT << 7) + UINTVAL (x);
      return hash ? hash : (unsigned int) CONST_INT;

    case CONST_WIDE_INT:
      for (i = 0; i < CONST_WIDE_INT_NUNITS (x); i++)
	hash += CONST_WIDE_INT_ELT (x, i);
      return hash;

    case CONST_DOUBLE:
      /* This is like the general case, except that it only counts
	 the integers representing the constant.  */
      hash += (unsigned) code + (unsigned) GET_MODE (x);
      if (TARGET_SUPPORTS_WIDE_INT == 0 && GET_MODE (x) == VOIDmode)
	hash += ((unsigned) CONST_DOUBLE_LOW (x)
		 + (unsigned) CONST_DOUBLE_HIGH (x));
      else
	hash += real_hash (CONST_DOUBLE_REAL_VALUE (x));
      return hash ? hash : (unsigned int) CONST_DOUBLE;

    case CONST_FIXED:
      hash += (unsigned int) code + (unsigned int) GET_MODE (x);
      hash += fixed_hash (CONST_FIXED_VALUE (x));
      return hash ? hash : (unsigned int) CONST_FIXED;

    case CONST_VECTOR:
      {
	int units;
	rtx elt;

	units = CONST_VECTOR_NUNITS (x);

	for (i = 0; i < units; ++i)
	  {
	    elt = CONST_VECTOR_ELT (x, i);
	    hash += cselib_hash_rtx (elt, 0, memmode);
	  }

	return hash;
      }

      /* Assume there is only one rtx object for any given label.  */
    case LABEL_REF:
      /* We don't hash on the address of the CODE_LABEL to avoid bootstrap
	 differences and differences between each stage's debugging dumps.  */
      hash += (((unsigned int) LABEL_REF << 7)
	       + CODE_LABEL_NUMBER (LABEL_REF_LABEL (x)));
      return hash ? hash : (unsigned int) LABEL_REF;

    case SYMBOL_REF:
      {
	/* Don't hash on the symbol's address to avoid bootstrap differences.
	   Different hash values may cause expressions to be recorded in
	   different orders and thus different registers to be used in the
	   final assembler.  This also avoids differences in the dump files
	   between various stages.  */
	unsigned int h = 0;
	const unsigned char *p = (const unsigned char *) XSTR (x, 0);

	while (*p)
	  h += (h << 7) + *p++; /* ??? revisit */

	hash += ((unsigned int) SYMBOL_REF << 7) + h;
	return hash ? hash : (unsigned int) SYMBOL_REF;
      }

    case PRE_DEC:
    case PRE_INC:
      /* We can't compute these without knowing the MEM mode.  */
      gcc_assert (memmode != VOIDmode);
      i = GET_MODE_SIZE (memmode);
      if (code == PRE_DEC)
	i = -i;
      /* Adjust the hash so that (mem:MEMMODE (pre_* (reg))) hashes
	 like (mem:MEMMODE (plus (reg) (const_int I))).  */
      hash += (unsigned) PLUS - (unsigned) code
	+ cselib_hash_rtx (XEXP (x, 0), create, memmode)
	+ cselib_hash_rtx (GEN_INT (i), create, memmode);
      return hash ? hash : 1 + (unsigned) PLUS;

    case PRE_MODIFY:
      gcc_assert (memmode != VOIDmode);
      return cselib_hash_rtx (XEXP (x, 1), create, memmode);

    case POST_DEC:
    case POST_INC:
    case POST_MODIFY:
      gcc_assert (memmode != VOIDmode);
      return cselib_hash_rtx (XEXP (x, 0), create, memmode);

    case PC:
    case CC0:
    case CALL:
    case UNSPEC_VOLATILE:
      return 0;

    case ASM_OPERANDS:
      if (MEM_VOLATILE_P (x))
	return 0;

      break;

    default:
      break;
    }

  i = GET_RTX_LENGTH (code) - 1;
  fmt = GET_RTX_FORMAT (code);
  for (; i >= 0; i--)
    {
      switch (fmt[i])
	{
	case 'e':
	  {
	    rtx tem = XEXP (x, i);
	    unsigned int tem_hash = cselib_hash_rtx (tem, create, memmode);

	    if (tem_hash == 0)
	      return 0;

	    hash += tem_hash;
	  }
	  break;
	case 'E':
	  for (j = 0; j < XVECLEN (x, i); j++)
	    {
	      unsigned int tem_hash
		= cselib_hash_rtx (XVECEXP (x, i, j), create, memmode);

	      if (tem_hash == 0)
		return 0;

	      hash += tem_hash;
	    }
	  break;

	case 's':
	  {
	    const unsigned char *p = (const unsigned char *) XSTR (x, i);

	    if (p)
	      while (*p)
		hash += *p++;
	    break;
	  }

	case 'i':
	  hash += XINT (x, i);
	  break;

	case '0':
	case 't':
	  /* unused */
	  break;

	default:
	  gcc_unreachable ();
	}
    }

  return hash ? hash : 1 + (unsigned int) GET_CODE (x);
}
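
/* Since operand hashes are combined by addition in the loop above, the
   result is insensitive to operand order; for example

     (plus:SI (reg:SI 1) (reg:SI 2))
     (plus:SI (reg:SI 2) (reg:SI 1))

   hash identically, matching the commutative-operand case in
   rtx_equal_for_cselib_1.  */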

/* Create a new value structure for VALUE and initialize it.  The mode of the
   value is MODE.  */

static inline cselib_val *
new_cselib_val (unsigned int hash, machine_mode mode, rtx x)
{
  cselib_val *e = (cselib_val *) pool_alloc (cselib_val_pool);

  gcc_assert (hash);
  gcc_assert (next_uid);

  e->hash = hash;
  e->uid = next_uid++;
  /* We use an alloc pool to allocate this RTL construct because it
     accounts for about 8% of the overall memory usage.  We know
     precisely when we can have VALUE RTXen (when cselib is active)
     so we don't need to put them in garbage collected memory.
     ??? Why should a VALUE be an RTX in the first place?  */
  e->val_rtx = (rtx) pool_alloc (value_pool);
  memset (e->val_rtx, 0, RTX_HDR_SIZE);
  PUT_CODE (e->val_rtx, VALUE);
  PUT_MODE (e->val_rtx, mode);
  CSELIB_VAL_PTR (e->val_rtx) = e;
  e->addr_list = 0;
  e->locs = 0;
  e->next_containing_mem = 0;

  if (dump_file && (dump_flags & TDF_CSELIB))
    {
      fprintf (dump_file, "cselib value %u:%u ", e->uid, hash);
      if (flag_dump_noaddr || flag_dump_unnumbered)
	fputs ("# ", dump_file);
      else
	fprintf (dump_file, "%p ", (void *) e);
      print_rtl_single (dump_file, x);
      fputc ('\n', dump_file);
    }

  return e;
}

/* ADDR_ELT is a value that is used as address.  MEM_ELT is the value that
   contains the data at this address.  X is a MEM that represents the
   value.  Update the two value structures to represent this situation.  */

static void
add_mem_for_addr (cselib_val *addr_elt, cselib_val *mem_elt, rtx x)
{
  struct elt_loc_list *l;

  addr_elt = canonical_cselib_val (addr_elt);
  mem_elt = canonical_cselib_val (mem_elt);

  /* Avoid duplicates.  */
  for (l = mem_elt->locs; l; l = l->next)
    if (MEM_P (l->loc)
	&& CSELIB_VAL_PTR (XEXP (l->loc, 0)) == addr_elt)
      {
	promote_debug_loc (l);
	return;
      }

  addr_elt->addr_list = new_elt_list (addr_elt->addr_list, mem_elt);
  new_elt_loc_list (mem_elt,
		    replace_equiv_address_nv (x, addr_elt->val_rtx));
  if (mem_elt->next_containing_mem == NULL)
    {
      mem_elt->next_containing_mem = first_containing_mem;
      first_containing_mem = mem_elt;
    }
}

/* Subroutine of cselib_lookup.  Return a value for X, which is a MEM rtx.
   If CREATE, make a new one if we haven't seen it before.  */

static cselib_val *
cselib_lookup_mem (rtx x, int create)
{
  machine_mode mode = GET_MODE (x);
  machine_mode addr_mode;
  cselib_val **slot;
  cselib_val *addr;
  cselib_val *mem_elt;
  struct elt_list *l;

  if (MEM_VOLATILE_P (x) || mode == BLKmode
      || !cselib_record_memory
      || (FLOAT_MODE_P (mode) && flag_float_store))
    return 0;

  addr_mode = GET_MODE (XEXP (x, 0));
  if (addr_mode == VOIDmode)
    addr_mode = Pmode;

  /* Look up the value for the address.  */
  addr = cselib_lookup (XEXP (x, 0), addr_mode, create, mode);
  if (! addr)
    return 0;

  addr = canonical_cselib_val (addr);
  /* Find a value that describes a value of our mode at that address.  */
  for (l = addr->addr_list; l; l = l->next)
    if (GET_MODE (l->elt->val_rtx) == mode)
      {
	promote_debug_loc (l->elt->locs);
	return l->elt;
      }

  if (! create)
    return 0;

  mem_elt = new_cselib_val (next_uid, mode, x);
  add_mem_for_addr (addr, mem_elt, x);
  slot = cselib_find_slot (mode, x, mem_elt->hash, INSERT, VOIDmode);
  *slot = mem_elt;
  return mem_elt;
}

/* Search through the possible substitutions in P.  We prefer a non-reg
   substitution because this allows us to expand the tree further.  If
   we only find a reg, take the one with the lowest regno.  There may be
   several non-reg results; we just take the first one because they will
   all expand to the same place.  */

static rtx
expand_loc (struct elt_loc_list *p, struct expand_value_data *evd,
	    int max_depth)
{
  rtx reg_result = NULL;
  unsigned int regno = UINT_MAX;
  struct elt_loc_list *p_in = p;

  for (; p; p = p->next)
    {
      /* Return these right away to avoid returning stack pointer based
	 expressions for frame pointer and vice versa, which is something
	 that would confuse DSE.  See the comment in cselib_expand_value_rtx_1
	 for more details.  */
      if (REG_P (p->loc)
	  && (REGNO (p->loc) == STACK_POINTER_REGNUM
	      || REGNO (p->loc) == FRAME_POINTER_REGNUM
	      || REGNO (p->loc) == HARD_FRAME_POINTER_REGNUM
	      || REGNO (p->loc) == cfa_base_preserved_regno))
	return p->loc;
      /* Avoid infinite recursion trying to expand a reg into the
	 same reg.  */
      if ((REG_P (p->loc))
	  && (REGNO (p->loc) < regno)
	  && !bitmap_bit_p (evd->regs_active, REGNO (p->loc)))
	{
	  reg_result = p->loc;
	  regno = REGNO (p->loc);
	}
      /* Avoid infinite recursion and do not try to expand the
	 value.  */
      else if (GET_CODE (p->loc) == VALUE
	       && CSELIB_VAL_PTR (p->loc)->locs == p_in)
	continue;
      else if (!REG_P (p->loc))
	{
	  rtx result, note;
	  if (dump_file && (dump_flags & TDF_CSELIB))
	    {
	      print_inline_rtx (dump_file, p->loc, 0);
	      fprintf (dump_file, "\n");
	    }
	  if (GET_CODE (p->loc) == LO_SUM
	      && GET_CODE (XEXP (p->loc, 1)) == SYMBOL_REF
	      && p->setting_insn
	      && (note = find_reg_note (p->setting_insn, REG_EQUAL, NULL_RTX))
	      && XEXP (note, 0) == XEXP (p->loc, 1))
	    return XEXP (p->loc, 1);
	  result = cselib_expand_value_rtx_1 (p->loc, evd, max_depth - 1);
	  if (result)
	    return result;
	}
    }

  if (regno != UINT_MAX)
    {
      rtx result;
      if (dump_file && (dump_flags & TDF_CSELIB))
	fprintf (dump_file, "r%d\n", regno);

      result = cselib_expand_value_rtx_1 (reg_result, evd, max_depth - 1);
      if (result)
	return result;
    }

  if (dump_file && (dump_flags & TDF_CSELIB))
    {
      if (reg_result)
	{
	  print_inline_rtx (dump_file, reg_result, 0);
	  fprintf (dump_file, "\n");
	}
      else
	fprintf (dump_file, "NULL\n");
    }
  return reg_result;
}


/* Forward substitute and expand an expression out to its roots.
   This is the opposite of common subexpression elimination.  Because
   local value numbering is such a weak optimization, the expanded
   expression is pretty much unique (not from a pointer-equality point
   of view but from a tree-shape point of view).

   This function returns NULL if the expansion fails.  The expansion
   will fail if there is no value number for one of the operands or if
   one of the operands has been overwritten between the current insn
   and the beginning of the basic block.  For instance x has no
   expansion in:

   r1 <- r1 + 3
   x <- r1 + 8

   REGS_ACTIVE is a scratch bitmap that should be clear when passed in.
   It is left clear on return.  */

rtx
cselib_expand_value_rtx (rtx orig, bitmap regs_active, int max_depth)
{
  struct expand_value_data evd;

  evd.regs_active = regs_active;
  evd.callback = NULL;
  evd.callback_arg = NULL;
  evd.dummy = false;

  return cselib_expand_value_rtx_1 (orig, &evd, max_depth);
}
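
/* A minimal usage sketch; the wrapper and the depth limit of 5 are
   illustrative assumptions, not part of this interface:

     static rtx
     sketch_expand_address (rtx addr)
     {
       bitmap scratch = BITMAP_ALLOC (NULL);
       rtx exp = cselib_expand_value_rtx (addr, scratch, 5);
       BITMAP_FREE (scratch);
       return exp;  // NULL if the expansion failed.
     }

   DSE-style users keep MAX_DEPTH small, since a huge expanded tree
   would not yield a useful address anyway.  */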

/* Same as cselib_expand_value_rtx, but using a callback to try to
   resolve some expressions.  The CB function should return ORIG if it
   can't or does not want to deal with a certain RTX.  Any other
   return value, including NULL, will be used as the expansion for
   VALUE, without any further changes.  */

rtx
cselib_expand_value_rtx_cb (rtx orig, bitmap regs_active, int max_depth,
			    cselib_expand_callback cb, void *data)
{
  struct expand_value_data evd;

  evd.regs_active = regs_active;
  evd.callback = cb;
  evd.callback_arg = data;
  evd.dummy = false;

  return cselib_expand_value_rtx_1 (orig, &evd, max_depth);
}

/* Similar to cselib_expand_value_rtx_cb, but no rtxs are actually copied
   or simplified.  Useful to find out whether cselib_expand_value_rtx_cb
   would return NULL or non-NULL, without allocating new rtx.  */

bool
cselib_dummy_expand_value_rtx_cb (rtx orig, bitmap regs_active, int max_depth,
				  cselib_expand_callback cb, void *data)
{
  struct expand_value_data evd;

  evd.regs_active = regs_active;
  evd.callback = cb;
  evd.callback_arg = data;
  evd.dummy = true;

  return cselib_expand_value_rtx_1 (orig, &evd, max_depth) != NULL;
}

/* Internal implementation of cselib_expand_value_rtx and
   cselib_expand_value_rtx_cb.  */

static rtx
cselib_expand_value_rtx_1 (rtx orig, struct expand_value_data *evd,
			   int max_depth)
{
  rtx copy, scopy;
  int i, j;
  RTX_CODE code;
  const char *format_ptr;
  machine_mode mode;

  code = GET_CODE (orig);

  /* In the context of dse, if we end up expanding into a huge tree, we
     will not have a useful address, so we might as well just give up
     quickly.  */
  if (max_depth <= 0)
    return NULL;

  switch (code)
    {
    case REG:
      {
	struct elt_list *l = REG_VALUES (REGNO (orig));

	if (l && l->elt == NULL)
	  l = l->next;
	for (; l; l = l->next)
	  if (GET_MODE (l->elt->val_rtx) == GET_MODE (orig))
	    {
	      rtx result;
	      unsigned regno = REGNO (orig);

	      /* The only thing that we are not willing to do (this
		 is a requirement of dse and if other potential uses
		 need this function we should add a parm to control
		 it) is that we will not substitute the
		 STACK_POINTER_REGNUM, FRAME_POINTER or the
		 HARD_FRAME_POINTER.

		 These expansions confuse the code that notices that
		 stores into the frame go dead at the end of the
		 function and that the frame is not affected by calls
		 to subroutines.  If you allow the
		 STACK_POINTER_REGNUM substitution, then dse will
		 think that parameter pushing also goes dead which is
		 wrong.  If you allow the FRAME_POINTER or the
		 HARD_FRAME_POINTER then you lose the opportunity to
		 make the frame assumptions.  */
	      if (regno == STACK_POINTER_REGNUM
		  || regno == FRAME_POINTER_REGNUM
		  || regno == HARD_FRAME_POINTER_REGNUM
		  || regno == cfa_base_preserved_regno)
		return orig;

	      bitmap_set_bit (evd->regs_active, regno);

	      if (dump_file && (dump_flags & TDF_CSELIB))
		fprintf (dump_file, "expanding: r%d into: ", regno);

	      result = expand_loc (l->elt->locs, evd, max_depth);
	      bitmap_clear_bit (evd->regs_active, regno);

	      if (result)
		return result;
	      else
		return orig;
	    }
      }

    CASE_CONST_ANY:
    case SYMBOL_REF:
    case CODE_LABEL:
    case PC:
    case CC0:
    case SCRATCH:
      /* SCRATCH must be shared because each one represents a distinct
	 value.  */
      return orig;
    case CLOBBER:
      if (REG_P (XEXP (orig, 0)) && HARD_REGISTER_NUM_P (REGNO (XEXP (orig, 0))))
	return orig;
      break;

    case CONST:
      if (shared_const_p (orig))
	return orig;
      break;

    case SUBREG:
      {
	rtx subreg;

	if (evd->callback)
	  {
	    subreg = evd->callback (orig, evd->regs_active, max_depth,
				    evd->callback_arg);
	    if (subreg != orig)
	      return subreg;
	  }

	subreg = cselib_expand_value_rtx_1 (SUBREG_REG (orig), evd,
					    max_depth - 1);
	if (!subreg)
	  return NULL;
	scopy = simplify_gen_subreg (GET_MODE (orig), subreg,
				     GET_MODE (SUBREG_REG (orig)),
				     SUBREG_BYTE (orig));
	if (scopy == NULL
	    || (GET_CODE (scopy) == SUBREG
		&& !REG_P (SUBREG_REG (scopy))
		&& !MEM_P (SUBREG_REG (scopy))))
	  return NULL;

	return scopy;
      }

    case VALUE:
      {
	rtx result;

	if (dump_file && (dump_flags & TDF_CSELIB))
	  {
	    fputs ("\nexpanding ", dump_file);
	    print_rtl_single (dump_file, orig);
	    fputs (" into...", dump_file);
	  }

	if (evd->callback)
	  {
	    result = evd->callback (orig, evd->regs_active, max_depth,
				    evd->callback_arg);

	    if (result != orig)
	      return result;
	  }

	result = expand_loc (CSELIB_VAL_PTR (orig)->locs, evd, max_depth);
	return result;
      }

    case DEBUG_EXPR:
      if (evd->callback)
	return evd->callback (orig, evd->regs_active, max_depth,
			      evd->callback_arg);
      return orig;

    default:
      break;
    }

  /* Copy the various flags, fields, and other information.  We assume
     that all fields need copying, and then clear the fields that should
     not be copied.  That is the sensible default behavior, and forces
     us to explicitly document why we are *not* copying a flag.  */
  if (evd->dummy)
    copy = NULL;
  else
    copy = shallow_copy_rtx (orig);

  format_ptr = GET_RTX_FORMAT (code);

  for (i = 0; i < GET_RTX_LENGTH (code); i++)
    switch (*format_ptr++)
      {
      case 'e':
	if (XEXP (orig, i) != NULL)
	  {
	    rtx result = cselib_expand_value_rtx_1 (XEXP (orig, i), evd,
						    max_depth - 1);
	    if (!result)
	      return NULL;
	    if (copy)
	      XEXP (copy, i) = result;
	  }
	break;

      case 'E':
      case 'V':
	if (XVEC (orig, i) != NULL)
	  {
	    if (copy)
	      XVEC (copy, i) = rtvec_alloc (XVECLEN (orig, i));
	    for (j = 0; j < XVECLEN (orig, i); j++)
	      {
		rtx result = cselib_expand_value_rtx_1 (XVECEXP (orig, i, j),
							evd, max_depth - 1);
		if (!result)
		  return NULL;
		if (copy)
		  XVECEXP (copy, i, j) = result;
	      }
	  }
	break;

      case 't':
      case 'w':
      case 'i':
      case 's':
      case 'S':
      case 'T':
      case 'u':
      case 'B':
      case '0':
	/* These are left unchanged.  */
	break;

      default:
	gcc_unreachable ();
      }

  if (evd->dummy)
    return orig;

  mode = GET_MODE (copy);
  /* If an operand has been simplified into a CONST_INT, which doesn't
     have a mode and whose mode isn't derivable from the whole rtx's
     mode, try simplify_*_operation first with the mode taken from the
     original's operand, and as a fallback wrap the CONST_INT into
     gen_rtx_CONST.  */
  scopy = copy;
  switch (GET_RTX_CLASS (code))
    {
    case RTX_UNARY:
      if (CONST_INT_P (XEXP (copy, 0))
	  && GET_MODE (XEXP (orig, 0)) != VOIDmode)
	{
	  scopy = simplify_unary_operation (code, mode, XEXP (copy, 0),
					    GET_MODE (XEXP (orig, 0)));
	  if (scopy)
	    return scopy;
	}
      break;
    case RTX_COMM_ARITH:
    case RTX_BIN_ARITH:
      /* These expressions can derive operand modes from the whole rtx's
	 mode.  */
      break;
    case RTX_TERNARY:
    case RTX_BITFIELD_OPS:
      if (CONST_INT_P (XEXP (copy, 0))
	  && GET_MODE (XEXP (orig, 0)) != VOIDmode)
	{
	  scopy = simplify_ternary_operation (code, mode,
					      GET_MODE (XEXP (orig, 0)),
					      XEXP (copy, 0), XEXP (copy, 1),
					      XEXP (copy, 2));
	  if (scopy)
	    return scopy;
	}
      break;
    case RTX_COMPARE:
    case RTX_COMM_COMPARE:
      if (CONST_INT_P (XEXP (copy, 0))
	  && GET_MODE (XEXP (copy, 1)) == VOIDmode
	  && (GET_MODE (XEXP (orig, 0)) != VOIDmode
	      || GET_MODE (XEXP (orig, 1)) != VOIDmode))
	{
	  scopy = simplify_relational_operation (code, mode,
						 (GET_MODE (XEXP (orig, 0))
						  != VOIDmode)
						 ? GET_MODE (XEXP (orig, 0))
						 : GET_MODE (XEXP (orig, 1)),
						 XEXP (copy, 0),
						 XEXP (copy, 1));
	  if (scopy)
	    return scopy;
	}
      break;
    default:
      break;
    }
  scopy = simplify_rtx (copy);
  if (scopy)
    return scopy;
  return copy;
}

1849/* Walk rtx X and replace all occurrences of REG and MEM subexpressions
1850   with VALUE expressions.  This way, it becomes independent of changes
1851   to registers and memory.
1852   X isn't actually modified; if modifications are needed, new rtl is
1853   allocated.  However, the return value can share rtl with X.
1854   If X is within a MEM, MEMMODE must be the mode of the MEM.  */
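/* For example (with made-up VALUE numbers), if (reg:SI 1) is known as
   VALUE 7 and (mem:SI (reg:SI 2)) as VALUE 8, then
   (plus:SI (reg:SI 1) (mem:SI (reg:SI 2))) is rewritten into
   (plus:SI (value:SI 7) (value:SI 8)).  */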

rtx
cselib_subst_to_values (rtx x, machine_mode memmode)
{
  enum rtx_code code = GET_CODE (x);
  const char *fmt = GET_RTX_FORMAT (code);
  cselib_val *e;
  struct elt_list *l;
  rtx copy = x;
  int i;

  switch (code)
    {
    case REG:
      l = REG_VALUES (REGNO (x));
      if (l && l->elt == NULL)
	l = l->next;
      for (; l; l = l->next)
	if (GET_MODE (l->elt->val_rtx) == GET_MODE (x))
	  return l->elt->val_rtx;

      gcc_unreachable ();

    case MEM:
      e = cselib_lookup_mem (x, 0);
      /* This used to happen for autoincrements, but we deal with them
	 properly now.  Remove the if stmt for the next release.  */
      if (! e)
	{
	  /* Assign a value that doesn't match any other.  */
	  e = new_cselib_val (next_uid, GET_MODE (x), x);
	}
      return e->val_rtx;

    case ENTRY_VALUE:
      e = cselib_lookup (x, GET_MODE (x), 0, memmode);
      if (! e)
	break;
      return e->val_rtx;

    CASE_CONST_ANY:
      return x;

    case PRE_DEC:
    case PRE_INC:
      gcc_assert (memmode != VOIDmode);
      i = GET_MODE_SIZE (memmode);
      if (code == PRE_DEC)
	i = -i;
      return cselib_subst_to_values (plus_constant (GET_MODE (x),
						    XEXP (x, 0), i),
				     memmode);
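      /* The PRE_DEC/PRE_INC case above rewrites, e.g. for an SImode MEM
	 on a target where SImode is 4 bytes wide (an illustrative case),
	 (pre_dec:SI (reg:SI 1)) into (plus:SI (reg:SI 1) (const_int -4))
	 before substituting VALUEs.  */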

    case PRE_MODIFY:
      gcc_assert (memmode != VOIDmode);
      return cselib_subst_to_values (XEXP (x, 1), memmode);

    case POST_DEC:
    case POST_INC:
    case POST_MODIFY:
      gcc_assert (memmode != VOIDmode);
      return cselib_subst_to_values (XEXP (x, 0), memmode);

    default:
      break;
    }

  for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
    {
      if (fmt[i] == 'e')
	{
	  rtx t = cselib_subst_to_values (XEXP (x, i), memmode);

	  if (t != XEXP (x, i))
	    {
	      if (x == copy)
		copy = shallow_copy_rtx (x);
	      XEXP (copy, i) = t;
	    }
	}
      else if (fmt[i] == 'E')
	{
	  int j;

	  for (j = 0; j < XVECLEN (x, i); j++)
	    {
	      rtx t = cselib_subst_to_values (XVECEXP (x, i, j), memmode);

	      if (t != XVECEXP (x, i, j))
		{
		  if (XVEC (x, i) == XVEC (copy, i))
		    {
		      if (x == copy)
			copy = shallow_copy_rtx (x);
		      XVEC (copy, i) = shallow_copy_rtvec (XVEC (x, i));
		    }
		  XVECEXP (copy, i, j) = t;
		}
	    }
	}
    }

  return copy;
}

/* Wrapper for cselib_subst_to_values, that indicates X is in INSN.  */

rtx
cselib_subst_to_values_from_insn (rtx x, machine_mode memmode, rtx_insn *insn)
{
  rtx ret;
  gcc_assert (!cselib_current_insn);
  cselib_current_insn = insn;
  ret = cselib_subst_to_values (x, memmode);
  cselib_current_insn = NULL;
  return ret;
}

/* Look up the rtl expression X in our tables and return the value it
   has.  If CREATE is zero, we return NULL if we don't know the value.
   Otherwise, we create a new one if possible, using mode MODE if X
   doesn't have a mode (i.e. because it's a constant).  When X is part
   of an address, MEMMODE should be the mode of the enclosing MEM if
   we're tracking autoinc expressions.  */
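/* A sketch of the intended use, through the public cselib_lookup
   wrapper below (illustrative only):

     cselib_val *v = cselib_lookup (x, GET_MODE (x), 0, VOIDmode);
     if (!v)
       ... X has no known value; pass CREATE == 1 to make one ...

   MODE only matters when GET_MODE (X) is VOIDmode, e.g. for a
   CONST_INT.  */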

static cselib_val *
cselib_lookup_1 (rtx x, machine_mode mode,
		 int create, machine_mode memmode)
{
  cselib_val **slot;
  cselib_val *e;
  unsigned int hashval;

  if (GET_MODE (x) != VOIDmode)
    mode = GET_MODE (x);

  if (GET_CODE (x) == VALUE)
    return CSELIB_VAL_PTR (x);

  if (REG_P (x))
    {
      struct elt_list *l;
      unsigned int i = REGNO (x);

      l = REG_VALUES (i);
      if (l && l->elt == NULL)
	l = l->next;
      for (; l; l = l->next)
	if (mode == GET_MODE (l->elt->val_rtx))
	  {
	    promote_debug_loc (l->elt->locs);
	    return l->elt;
	  }

      if (! create)
	return 0;

      if (i < FIRST_PSEUDO_REGISTER)
	{
	  unsigned int n = hard_regno_nregs[i][mode];

	  if (n > max_value_regs)
	    max_value_regs = n;
	}

      e = new_cselib_val (next_uid, GET_MODE (x), x);
      new_elt_loc_list (e, x);
      if (REG_VALUES (i) == 0)
	{
	  /* Maintain the invariant that the first entry of
	     REG_VALUES, if present, must be the value used to set the
	     register, or NULL.  */
	  used_regs[n_used_regs++] = i;
	  REG_VALUES (i) = new_elt_list (REG_VALUES (i), NULL);
	}
      else if (cselib_preserve_constants
	       && GET_MODE_CLASS (mode) == MODE_INT)
	{
	  /* During var-tracking, try harder to find equivalences
	     for SUBREGs.  If a setter sets, say, a DImode register
	     and the user uses that register only in SImode, add a
	     lowpart subreg location.  */
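	  /* E.g. (with a made-up regno), if (reg:DI 100) already carries
	     a DImode VALUE and we are now creating an SImode VALUE for
	     the same register, the new VALUE gains the extra location
	     (subreg:SI (value:DI ...) 0) (the lowpart byte offset on a
	     little-endian target).  */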
	  struct elt_list *lwider = NULL;
	  l = REG_VALUES (i);
	  if (l && l->elt == NULL)
	    l = l->next;
	  for (; l; l = l->next)
	    if (GET_MODE_CLASS (GET_MODE (l->elt->val_rtx)) == MODE_INT
		&& GET_MODE_SIZE (GET_MODE (l->elt->val_rtx))
		   > GET_MODE_SIZE (mode)
		&& (lwider == NULL
		    || GET_MODE_SIZE (GET_MODE (l->elt->val_rtx))
		       < GET_MODE_SIZE (GET_MODE (lwider->elt->val_rtx))))
	      {
		struct elt_loc_list *el;
		if (i < FIRST_PSEUDO_REGISTER
		    && hard_regno_nregs[i][GET_MODE (l->elt->val_rtx)] != 1)
		  continue;
		for (el = l->elt->locs; el; el = el->next)
		  if (!REG_P (el->loc))
		    break;
		if (el)
		  lwider = l;
	      }
	  if (lwider)
	    {
	      rtx sub = lowpart_subreg (mode, lwider->elt->val_rtx,
					GET_MODE (lwider->elt->val_rtx));
	      if (sub)
		new_elt_loc_list (e, sub);
	    }
	}
      REG_VALUES (i)->next = new_elt_list (REG_VALUES (i)->next, e);
      slot = cselib_find_slot (mode, x, e->hash, INSERT, memmode);
      *slot = e;
      return e;
    }

  if (MEM_P (x))
    return cselib_lookup_mem (x, create);

  hashval = cselib_hash_rtx (x, create, memmode);
  /* Can't even create if hashing is not possible.  */
  if (! hashval)
    return 0;

  slot = cselib_find_slot (mode, x, hashval,
			   create ? INSERT : NO_INSERT, memmode);
  if (slot == 0)
    return 0;

  e = (cselib_val *) *slot;
  if (e)
    return e;

  e = new_cselib_val (hashval, mode, x);

  /* We have to fill the slot before calling cselib_subst_to_values:
     the hash table is inconsistent until we do so, and
     cselib_subst_to_values will need to do lookups.  */
  *slot = e;
  new_elt_loc_list (e, cselib_subst_to_values (x, memmode));
  return e;
}

/* Wrapper for cselib_lookup, that indicates X is in INSN.  */

cselib_val *
cselib_lookup_from_insn (rtx x, machine_mode mode,
			 int create, machine_mode memmode, rtx_insn *insn)
{
  cselib_val *ret;

  gcc_assert (!cselib_current_insn);
  cselib_current_insn = insn;

  ret = cselib_lookup (x, mode, create, memmode);

  cselib_current_insn = NULL;

  return ret;
}

/* Wrapper for cselib_lookup_1, that logs the lookup result and
   maintains invariants related to debug insns.  */

cselib_val *
cselib_lookup (rtx x, machine_mode mode,
	       int create, machine_mode memmode)
{
  cselib_val *ret = cselib_lookup_1 (x, mode, create, memmode);

  /* ??? Should we return NULL if we're not to create an entry, the
     found loc is a debug loc and cselib_current_insn is not DEBUG?
     If so, we should also avoid converting val to non-DEBUG; probably
     easiest setting cselib_current_insn to NULL before the call
     above.  */

  if (dump_file && (dump_flags & TDF_CSELIB))
    {
      fputs ("cselib lookup ", dump_file);
      print_inline_rtx (dump_file, x, 2);
      fprintf (dump_file, " => %u:%u\n",
	       ret ? ret->uid : 0,
	       ret ? ret->hash : 0);
    }

  return ret;
}

/* Invalidate any entries in reg_values that overlap REGNO.  This is called
   if REGNO is changing.  MODE is the mode of the assignment to REGNO, which
   is used to determine how many hard registers are being changed.  If MODE
   is VOIDmode, then only REGNO is being changed; this is used when
   invalidating call clobbered registers across a call.  */

static void
cselib_invalidate_regno (unsigned int regno, machine_mode mode)
{
  unsigned int endregno;
  unsigned int i;

  /* If we see pseudos after reload, something is _wrong_.  */
  gcc_assert (!reload_completed || regno < FIRST_PSEUDO_REGISTER
	      || reg_renumber[regno] < 0);

  /* Determine the range of registers that must be invalidated.  For
     pseudos, only REGNO is affected.  For hard regs, we must take MODE
     into account, and we must also invalidate lower register numbers
     if they contain values that overlap REGNO.  */
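  /* E.g. on a hypothetical target where DImode occupies two hard regs,
     invalidating regno 3 in DImode must drop values in regs 3 and 4,
     but also any value set in reg 2 in a multi-word mode that reaches
     into reg 3; max_value_regs bounds how far below REGNO we need to
     look.  */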
  if (regno < FIRST_PSEUDO_REGISTER)
    {
      gcc_assert (mode != VOIDmode);

      if (regno < max_value_regs)
	i = 0;
      else
	i = regno - max_value_regs;

      endregno = end_hard_regno (mode, regno);
    }
  else
    {
      i = regno;
      endregno = regno + 1;
    }

  for (; i < endregno; i++)
    {
      struct elt_list **l = &REG_VALUES (i);

      /* Go through all known values for this reg; if it overlaps the range
	 we're invalidating, remove the value.  */
      while (*l)
	{
	  cselib_val *v = (*l)->elt;
	  bool had_locs;
	  rtx setting_insn;
	  struct elt_loc_list **p;
	  unsigned int this_last = i;

	  if (i < FIRST_PSEUDO_REGISTER && v != NULL)
	    this_last = end_hard_regno (GET_MODE (v->val_rtx), i) - 1;

	  if (this_last < regno || v == NULL
	      || (v == cfa_base_preserved_val
		  && i == cfa_base_preserved_regno))
	    {
	      l = &(*l)->next;
	      continue;
	    }

	  /* We have an overlap.  */
	  if (*l == REG_VALUES (i))
	    {
	      /* Maintain the invariant that the first entry of
		 REG_VALUES, if present, must be the value used to set
		 the register, or NULL.  This is also nice because
		 then we won't push the same regno onto used_regs
		 multiple times.  */
	      (*l)->elt = NULL;
	      l = &(*l)->next;
	    }
	  else
	    unchain_one_elt_list (l);

	  v = canonical_cselib_val (v);

	  had_locs = v->locs != NULL;
	  setting_insn = v->locs ? v->locs->setting_insn : NULL;

	  /* Now, we clear the mapping from value to reg.  It must exist, so
	     this code will crash intentionally if it doesn't.  */
	  for (p = &v->locs; ; p = &(*p)->next)
	    {
	      rtx x = (*p)->loc;

	      if (REG_P (x) && REGNO (x) == i)
		{
		  unchain_one_elt_loc_list (p);
		  break;
		}
	    }

	  if (had_locs && v->locs == 0 && !PRESERVED_VALUE_P (v->val_rtx))
	    {
	      if (setting_insn && DEBUG_INSN_P (setting_insn))
		n_useless_debug_values++;
	      else
		n_useless_values++;
	    }
	}
    }
}

/* Invalidate any locations in the table which are changed because of a
   store to MEM_RTX.  If this is called because of a non-const call
   instruction, MEM_RTX is (mem:BLK (scratch)).  */
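/* As an illustration, a store to (mem:SI (reg:SI 5)) removes every MEM
   location that canon_anti_dependence cannot prove disjoint from it; a
   VALUE left with no locations this way becomes useless and is counted
   for later removal.  */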

static void
cselib_invalidate_mem (rtx mem_rtx)
{
  cselib_val **vp, *v, *next;
  int num_mems = 0;
  rtx mem_addr;

  mem_addr = canon_rtx (get_addr (XEXP (mem_rtx, 0)));
  mem_rtx = canon_rtx (mem_rtx);

  vp = &first_containing_mem;
  for (v = *vp; v != &dummy_val; v = next)
    {
      bool has_mem = false;
      struct elt_loc_list **p = &v->locs;
      bool had_locs = v->locs != NULL;
      rtx setting_insn = v->locs ? v->locs->setting_insn : NULL;

      while (*p)
	{
	  rtx x = (*p)->loc;
	  cselib_val *addr;
	  struct elt_list **mem_chain;

	  /* MEMs may occur in locations only at the top level; below
	     that every MEM or REG is substituted by its VALUE.  */
	  if (!MEM_P (x))
	    {
	      p = &(*p)->next;
	      continue;
	    }
	  if (num_mems < PARAM_VALUE (PARAM_MAX_CSELIB_MEMORY_LOCATIONS)
	      && ! canon_anti_dependence (x, false, mem_rtx,
					  GET_MODE (mem_rtx), mem_addr))
	    {
	      has_mem = true;
	      num_mems++;
	      p = &(*p)->next;
	      continue;
	    }

	  /* This one overlaps.  */
	  /* We must have a mapping from this MEM's address to the
	     value (V).  Remove that, too.  */
	  addr = cselib_lookup (XEXP (x, 0), VOIDmode, 0, GET_MODE (x));
	  addr = canonical_cselib_val (addr);
	  gcc_checking_assert (v == canonical_cselib_val (v));
	  mem_chain = &addr->addr_list;
	  for (;;)
	    {
	      cselib_val *canon = canonical_cselib_val ((*mem_chain)->elt);

	      if (canon == v)
		{
		  unchain_one_elt_list (mem_chain);
		  break;
		}

	      /* Record canonicalized elt.  */
	      (*mem_chain)->elt = canon;

	      mem_chain = &(*mem_chain)->next;
	    }

	  unchain_one_elt_loc_list (p);
	}

      if (had_locs && v->locs == 0 && !PRESERVED_VALUE_P (v->val_rtx))
	{
	  if (setting_insn && DEBUG_INSN_P (setting_insn))
	    n_useless_debug_values++;
	  else
	    n_useless_values++;
	}

      next = v->next_containing_mem;
      if (has_mem)
	{
	  *vp = v;
	  vp = &(*vp)->next_containing_mem;
	}
      else
	v->next_containing_mem = NULL;
    }
  *vp = &dummy_val;
}

/* Invalidate DEST, which is being assigned to or clobbered.  */

void
cselib_invalidate_rtx (rtx dest)
{
  while (GET_CODE (dest) == SUBREG
	 || GET_CODE (dest) == ZERO_EXTRACT
	 || GET_CODE (dest) == STRICT_LOW_PART)
    dest = XEXP (dest, 0);

  if (REG_P (dest))
    cselib_invalidate_regno (REGNO (dest), GET_MODE (dest));
  else if (MEM_P (dest))
    cselib_invalidate_mem (dest);
}

/* A wrapper for cselib_invalidate_rtx to be called via note_stores.  */

static void
cselib_invalidate_rtx_note_stores (rtx dest, const_rtx ignore ATTRIBUTE_UNUSED,
				   void *data ATTRIBUTE_UNUSED)
{
  cselib_invalidate_rtx (dest);
}

/* Record the result of a SET instruction.  DEST is being set; the source
   contains the value described by SRC_ELT.  If DEST is a MEM, DEST_ADDR_ELT
   describes its address.  */
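/* For example (with made-up pseudo numbers), recording
   (set (reg:SI 3) (reg:SI 2)) is passed the VALUE of (reg:SI 2) as
   SRC_ELT; (reg:SI 3) is added to that VALUE's location list, and the
   VALUE becomes the first entry of REG_VALUES (3).  */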

static void
cselib_record_set (rtx dest, cselib_val *src_elt, cselib_val *dest_addr_elt)
{
  int dreg = REG_P (dest) ? (int) REGNO (dest) : -1;

  if (src_elt == 0 || side_effects_p (dest))
    return;

  if (dreg >= 0)
    {
      if (dreg < FIRST_PSEUDO_REGISTER)
	{
	  unsigned int n = hard_regno_nregs[dreg][GET_MODE (dest)];

	  if (n > max_value_regs)
	    max_value_regs = n;
	}

      if (REG_VALUES (dreg) == 0)
	{
	  used_regs[n_used_regs++] = dreg;
	  REG_VALUES (dreg) = new_elt_list (REG_VALUES (dreg), src_elt);
	}
      else
	{
	  /* The register should have been invalidated.  */
	  gcc_assert (REG_VALUES (dreg)->elt == 0);
	  REG_VALUES (dreg)->elt = src_elt;
	}

      if (src_elt->locs == 0 && !PRESERVED_VALUE_P (src_elt->val_rtx))
	n_useless_values--;
      new_elt_loc_list (src_elt, dest);
    }
  else if (MEM_P (dest) && dest_addr_elt != 0
	   && cselib_record_memory)
    {
      if (src_elt->locs == 0 && !PRESERVED_VALUE_P (src_elt->val_rtx))
	n_useless_values--;
      add_mem_for_addr (dest_addr_elt, src_elt, dest);
    }
}

/* Make ELT and X's VALUE equivalent to each other at INSN.  */

void
cselib_add_permanent_equiv (cselib_val *elt, rtx x, rtx_insn *insn)
{
  cselib_val *nelt;
  rtx_insn *save_cselib_current_insn = cselib_current_insn;

  gcc_checking_assert (elt);
  gcc_checking_assert (PRESERVED_VALUE_P (elt->val_rtx));
  gcc_checking_assert (!side_effects_p (x));

  cselib_current_insn = insn;

  nelt = cselib_lookup (x, GET_MODE (elt->val_rtx), 1, VOIDmode);

  if (nelt != elt)
    {
      cselib_any_perm_equivs = true;

      if (!PRESERVED_VALUE_P (nelt->val_rtx))
	cselib_preserve_value (nelt);

      new_elt_loc_list (nelt, elt->val_rtx);
    }

  cselib_current_insn = save_cselib_current_insn;
}

/* Return TRUE if any permanent equivalences have been recorded since
   the table was last initialized.  */
bool
cselib_have_permanent_equivalences (void)
{
  return cselib_any_perm_equivs;
}

/* There is no good way to determine how many elements there can be
   in a PARALLEL.  Since it's fairly cheap, use a really large number.  */
#define MAX_SETS (FIRST_PSEUDO_REGISTER * 2)

struct cselib_record_autoinc_data
{
  struct cselib_set *sets;
  int n_sets;
};

/* Callback for for_each_inc_dec.  Records in ARG the SETs implied by
   autoinc RTXs: SRC (plus SRCOFF, if SRCOFF is non-NULL) is stored in
   DEST.  */
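/* E.g. for (mem:SI (post_inc:SI (reg:SI 1))) with 4-byte SImode (an
   illustrative case), we are handed DEST == SRC == (reg:SI 1) and
   SRCOFF == (const_int 4), and so record the implied
   (set (reg:SI 1) (plus:SI (reg:SI 1) (const_int 4))).  */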

static int
cselib_record_autoinc_cb (rtx mem ATTRIBUTE_UNUSED, rtx op ATTRIBUTE_UNUSED,
			  rtx dest, rtx src, rtx srcoff, void *arg)
{
  struct cselib_record_autoinc_data *data;
  data = (struct cselib_record_autoinc_data *)arg;

  data->sets[data->n_sets].dest = dest;

  if (srcoff)
    data->sets[data->n_sets].src = gen_rtx_PLUS (GET_MODE (src), src, srcoff);
  else
    data->sets[data->n_sets].src = src;

  data->n_sets++;

  return 0;
}

/* Record the effects of any sets and autoincs in INSN.  */
static void
cselib_record_sets (rtx_insn *insn)
{
  int n_sets = 0;
  int i;
  struct cselib_set sets[MAX_SETS];
  rtx body = PATTERN (insn);
  rtx cond = 0;
  int n_sets_before_autoinc;
  struct cselib_record_autoinc_data data;

  if (GET_CODE (body) == COND_EXEC)
    {
      cond = COND_EXEC_TEST (body);
      body = COND_EXEC_CODE (body);
    }

  /* Find all sets.  */
  if (GET_CODE (body) == SET)
    {
      sets[0].src = SET_SRC (body);
      sets[0].dest = SET_DEST (body);
      n_sets = 1;
    }
  else if (GET_CODE (body) == PARALLEL)
    {
      /* Look through the PARALLEL and record the values being
	 set, if possible.  (CLOBBERs are taken care of by the
	 note_stores call below.)  */
      for (i = XVECLEN (body, 0) - 1; i >= 0; --i)
	{
	  rtx x = XVECEXP (body, 0, i);

	  if (GET_CODE (x) == SET)
	    {
	      sets[n_sets].src = SET_SRC (x);
	      sets[n_sets].dest = SET_DEST (x);
	      n_sets++;
	    }
	}
    }

  if (n_sets == 1
      && MEM_P (sets[0].src)
      && !cselib_record_memory
      && MEM_READONLY_P (sets[0].src))
    {
      rtx note = find_reg_equal_equiv_note (insn);

      if (note && CONSTANT_P (XEXP (note, 0)))
	sets[0].src = XEXP (note, 0);
    }

  data.sets = sets;
  data.n_sets = n_sets_before_autoinc = n_sets;
  for_each_inc_dec (PATTERN (insn), cselib_record_autoinc_cb, &data);
  n_sets = data.n_sets;

  /* Look up the values that are read.  Do this before invalidating the
     locations that are written.  */
  for (i = 0; i < n_sets; i++)
    {
      rtx dest = sets[i].dest;

      /* A STRICT_LOW_PART can be ignored; we'll record the equivalence for
         the low part after invalidating any knowledge about larger modes.  */
      if (GET_CODE (sets[i].dest) == STRICT_LOW_PART)
	sets[i].dest = dest = XEXP (dest, 0);

      /* We don't know how to record anything but REG or MEM.  */
      if (REG_P (dest)
	  || (MEM_P (dest) && cselib_record_memory))
        {
	  rtx src = sets[i].src;
	  if (cond)
	    src = gen_rtx_IF_THEN_ELSE (GET_MODE (dest), cond, src, dest);
	  sets[i].src_elt = cselib_lookup (src, GET_MODE (dest), 1, VOIDmode);
	  if (MEM_P (dest))
	    {
	      machine_mode address_mode = get_address_mode (dest);

	      sets[i].dest_addr_elt = cselib_lookup (XEXP (dest, 0),
						     address_mode, 1,
						     GET_MODE (dest));
	    }
	  else
	    sets[i].dest_addr_elt = 0;
	}
    }

  if (cselib_record_sets_hook)
    cselib_record_sets_hook (insn, sets, n_sets);

  /* Invalidate all locations written by this insn.  Note that the elts we
     looked up in the previous loop aren't affected, just some of their
     locations may go away.  */
  note_stores (body, cselib_invalidate_rtx_note_stores, NULL);

  for (i = n_sets_before_autoinc; i < n_sets; i++)
    cselib_invalidate_rtx (sets[i].dest);

  /* If this is an asm, look for duplicate sets.  This can happen when the
     user uses the same value as an output multiple times.  This is valid
     if the outputs are not actually used thereafter.  Treat this case as
     if the value isn't actually set.  We do this by smashing the
     destination to pc_rtx, so that we won't record the value later.  */
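  /* E.g. a hypothetical asm ("..." : "=r" (x), "=r" (x)) can yield two
     sets of the same register; both destinations are smashed to pc_rtx
     below so that no bogus equivalence is recorded for either.  */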
  if (n_sets >= 2 && asm_noperands (body) >= 0)
    {
      for (i = 0; i < n_sets; i++)
	{
	  rtx dest = sets[i].dest;
	  if (REG_P (dest) || MEM_P (dest))
	    {
	      int j;
	      for (j = i + 1; j < n_sets; j++)
		if (rtx_equal_p (dest, sets[j].dest))
		  {
		    sets[i].dest = pc_rtx;
		    sets[j].dest = pc_rtx;
		  }
	    }
	}
    }

  /* Now enter the equivalences in our tables.  */
  for (i = 0; i < n_sets; i++)
    {
      rtx dest = sets[i].dest;
      if (REG_P (dest)
	  || (MEM_P (dest) && cselib_record_memory))
	cselib_record_set (dest, sets[i].src_elt, sets[i].dest_addr_elt);
    }
}

/* Return true if INSN in the prologue initializes hard_frame_pointer_rtx.  */

bool
fp_setter_insn (rtx insn)
{
  rtx expr, pat = NULL_RTX;

  if (!RTX_FRAME_RELATED_P (insn))
    return false;

  expr = find_reg_note (insn, REG_FRAME_RELATED_EXPR, NULL_RTX);
  if (expr)
    pat = XEXP (expr, 0);
  if (!modified_in_p (hard_frame_pointer_rtx, pat ? pat : insn))
    return false;

  /* Don't return true for frame pointer restores in the epilogue.  */
  if (find_reg_note (insn, REG_CFA_RESTORE, hard_frame_pointer_rtx))
    return false;
  return true;
}

/* Record the effects of INSN.  */

void
cselib_process_insn (rtx_insn *insn)
{
  int i;
  rtx x;

  cselib_current_insn = insn;

  /* Forget everything at a CODE_LABEL or a setjmp.  */
  if ((LABEL_P (insn)
       || (CALL_P (insn)
	   && find_reg_note (insn, REG_SETJMP, NULL)))
      && !cselib_preserve_constants)
    {
      cselib_reset_table (next_uid);
      cselib_current_insn = NULL;
      return;
    }

  if (! INSN_P (insn))
    {
      cselib_current_insn = NULL;
      return;
    }

  /* If this is a call instruction, forget anything stored in a
     call clobbered register, or, if this is not a const call, in
     memory.  */
  if (CALL_P (insn))
    {
      for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
	if (call_used_regs[i]
	    || (REG_VALUES (i) && REG_VALUES (i)->elt
		&& HARD_REGNO_CALL_PART_CLOBBERED (i,
		      GET_MODE (REG_VALUES (i)->elt->val_rtx))))
	  cselib_invalidate_regno (i, reg_raw_mode[i]);

      /* Since it is not clear how cselib is going to be used, be
	 conservative here and treat looping pure or const functions
	 as if they were regular functions.  */
      if (RTL_LOOPING_CONST_OR_PURE_CALL_P (insn)
	  || !(RTL_CONST_OR_PURE_CALL_P (insn)))
	cselib_invalidate_mem (callmem);
    }

  cselib_record_sets (insn);

  /* Look for any CLOBBERs in CALL_INSN_FUNCTION_USAGE, but only
     after we have processed the insn.  */
  if (CALL_P (insn))
    {
      for (x = CALL_INSN_FUNCTION_USAGE (insn); x; x = XEXP (x, 1))
	if (GET_CODE (XEXP (x, 0)) == CLOBBER)
	  cselib_invalidate_rtx (XEXP (XEXP (x, 0), 0));
      /* Flush everything on setjmp.  */
      if (cselib_preserve_constants
	  && find_reg_note (insn, REG_SETJMP, NULL))
	{
	  cselib_preserve_only_values ();
	  cselib_reset_table (next_uid);
	}
    }

  /* On a setter of the hard frame pointer, if frame_pointer_needed,
     invalidate stack_pointer_rtx, so that sp and {,h}fp based
     VALUEs are distinct.  */
  if (reload_completed
      && frame_pointer_needed
      && fp_setter_insn (insn))
    cselib_invalidate_rtx (stack_pointer_rtx);

  cselib_current_insn = NULL;

  if (n_useless_values > MAX_USELESS_VALUES
      /* remove_useless_values is linear in the hash table size.  Avoid
         quadratic behavior for very large hashtables with very few
	 useless elements.  */
      && ((unsigned int)n_useless_values
	  > (cselib_hash_table->elements () - n_debug_values) / 4))
    remove_useless_values ();
}

/* Initialize cselib for one pass.  The caller must also call
   init_alias_analysis.  */
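/* A minimal sketch of a client pass (illustrative only; bb, insn and
   cfun as usual in RTL passes):

     cselib_init (CSELIB_RECORD_MEMORY);
     FOR_EACH_BB_FN (bb, cfun)
       FOR_BB_INSNS (bb, insn)
	 cselib_process_insn (insn);
     cselib_finish ();

   bracketed by init_alias_analysis / end_alias_analysis as noted
   above.  */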

void
cselib_init (int record_what)
{
  elt_list_pool = create_alloc_pool ("elt_list",
				     sizeof (struct elt_list), 10);
  elt_loc_list_pool = create_alloc_pool ("elt_loc_list",
				         sizeof (struct elt_loc_list), 10);
  cselib_val_pool = create_alloc_pool ("cselib_val_list",
				       sizeof (cselib_val), 10);
  value_pool = create_alloc_pool ("value", RTX_CODE_SIZE (VALUE), 100);
  cselib_record_memory = record_what & CSELIB_RECORD_MEMORY;
  cselib_preserve_constants = record_what & CSELIB_PRESERVE_CONSTANTS;
  cselib_any_perm_equivs = false;

  /* (mem:BLK (scratch)) is a special mechanism to conflict with everything,
     see canon_true_dependence.  This is only created once.  */
  if (! callmem)
    callmem = gen_rtx_MEM (BLKmode, gen_rtx_SCRATCH (VOIDmode));

  cselib_nregs = max_reg_num ();

  /* We preserve reg_values to allow expensive clearing of the whole thing.
     Reallocate it however if it happens to be too large.  */
  if (!reg_values || reg_values_size < cselib_nregs
      || (reg_values_size > 10 && reg_values_size > cselib_nregs * 4))
    {
      free (reg_values);
      /* Some space for newly emitted instructions, so we don't end up
	 reallocating in between passes.  */
      reg_values_size = cselib_nregs + (63 + cselib_nregs) / 16;
      reg_values = XCNEWVEC (struct elt_list *, reg_values_size);
    }
  used_regs = XNEWVEC (unsigned int, cselib_nregs);
  n_used_regs = 0;
  cselib_hash_table = new hash_table<cselib_hasher> (31);
  if (cselib_preserve_constants)
    cselib_preserved_hash_table = new hash_table<cselib_hasher> (31);
  next_uid = 1;
}

/* Called when the current user is done with cselib.  */

void
cselib_finish (void)
{
  bool preserved = cselib_preserve_constants;
  cselib_discard_hook = NULL;
  cselib_preserve_constants = false;
  cselib_any_perm_equivs = false;
  cfa_base_preserved_val = NULL;
  cfa_base_preserved_regno = INVALID_REGNUM;
  free_alloc_pool (elt_list_pool);
  free_alloc_pool (elt_loc_list_pool);
  free_alloc_pool (cselib_val_pool);
  free_alloc_pool (value_pool);
  cselib_clear_table ();
  delete cselib_hash_table;
  cselib_hash_table = NULL;
  if (preserved)
    delete cselib_preserved_hash_table;
  cselib_preserved_hash_table = NULL;
  free (used_regs);
  used_regs = 0;
  n_useless_values = 0;
  n_useless_debug_values = 0;
  n_debug_values = 0;
  next_uid = 0;
}

/* Dump the cselib_val *X to FILE *OUT.  */

int
dump_cselib_val (cselib_val **x, FILE *out)
{
  cselib_val *v = *x;
  bool need_lf = true;

  print_inline_rtx (out, v->val_rtx, 0);

  if (v->locs)
    {
      struct elt_loc_list *l = v->locs;
      if (need_lf)
	{
	  fputc ('\n', out);
	  need_lf = false;
	}
      fputs (" locs:", out);
      do
	{
	  if (l->setting_insn)
	    fprintf (out, "\n  from insn %i ",
		     INSN_UID (l->setting_insn));
	  else
	    fprintf (out, "\n   ");
	  print_inline_rtx (out, l->loc, 4);
	}
      while ((l = l->next));
      fputc ('\n', out);
    }
  else
    {
      fputs (" no locs", out);
      need_lf = true;
    }

  if (v->addr_list)
    {
      struct elt_list *e = v->addr_list;
      if (need_lf)
	{
	  fputc ('\n', out);
	  need_lf = false;
	}
      fputs (" addr list:", out);
      do
	{
	  fputs ("\n  ", out);
	  print_inline_rtx (out, e->elt->val_rtx, 2);
	}
      while ((e = e->next));
      fputc ('\n', out);
    }
  else
    {
      fputs (" no addrs", out);
      need_lf = true;
    }

  if (v->next_containing_mem == &dummy_val)
    fputs (" last mem\n", out);
  else if (v->next_containing_mem)
    {
      fputs (" next mem ", out);
      print_inline_rtx (out, v->next_containing_mem->val_rtx, 2);
      fputc ('\n', out);
    }
  else if (need_lf)
    fputc ('\n', out);

  return 1;
}

/* Dump to OUT everything in the CSELIB table.  */

void
dump_cselib_table (FILE *out)
{
  fprintf (out, "cselib hash table:\n");
  cselib_hash_table->traverse <FILE *, dump_cselib_val> (out);
  fprintf (out, "cselib preserved hash table:\n");
  cselib_preserved_hash_table->traverse <FILE *, dump_cselib_val> (out);
  if (first_containing_mem != &dummy_val)
    {
      fputs ("first mem ", out);
      print_inline_rtx (out, first_containing_mem->val_rtx, 2);
      fputc ('\n', out);
    }
  fprintf (out, "next uid %u\n", next_uid);
}

#include "gt-cselib.h"