/* Post reload partially redundant load elimination
   Copyright (C) 2004-2017 Free Software Foundation, Inc.

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify it under
the terms of the GNU General Public License as published by the Free
Software Foundation; either version 3, or (at your option) any later
version.

GCC is distributed in the hope that it will be useful, but WITHOUT ANY
WARRANTY; without even the implied warranty of MERCHANTABILITY or
FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING3.  If not see
<http://www.gnu.org/licenses/>.  */

#include "config.h"
#include "system.h"
#include "coretypes.h"
#include "backend.h"
#include "target.h"
#include "rtl.h"
#include "tree.h"
#include "predict.h"
#include "df.h"
#include "memmodel.h"
#include "tm_p.h"
#include "insn-config.h"
#include "emit-rtl.h"
#include "recog.h"

#include "cfgrtl.h"
#include "profile.h"
#include "expr.h"
#include "params.h"
#include "tree-pass.h"
#include "dbgcnt.h"
#include "gcse-common.h"

/* The following code implements gcse after reload.  The purpose of this
   pass is to clean up redundant loads generated by reload and other
   optimizations that come after gcse.  It searches for simple inter-block
   redundancies and tries to eliminate them by adding moves and loads
   in cold places.

   Perform partially redundant load elimination, trying to eliminate
   redundant loads created by the reload pass.  We look for fully or
   partially redundant loads fed by one or more loads/stores in
   predecessor BBs, and try adding loads to make them fully redundant.
   We also check whether it's worth adding loads to be able to delete
   the redundant load.

   Algorithm:
   1. Build available expressions hash table:
       For each load/store instruction, if the loaded/stored memory didn't
       change until the end of the basic block, add this memory expression
       to the hash table.
   2. Perform redundancy elimination:
      For each load instruction do the following:
	 perform partial redundancy elimination, check if it's worth adding
	 loads to make the load fully redundant.  If so, add loads and
	 register copies and delete the load.
   3. Delete instructions made redundant in step 2.

   Future enhancement:
     If the loaded register is used/defined between load and some store,
     look for some other free register between load and all its stores,
     and replace the load with a copy from this register to the loaded
     register.
*/
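
/* As an illustration (register and block numbers here are hypothetical,
   not taken from any particular target): suppose block B3 begins with

       (set (reg:SI 0) (mem:SI (reg:SI 1)))

   and the same memory value is available at the end of predecessor B1 in
   some register, but not at the end of predecessor B2.  The pass then
   inserts on edge B1->B3 a register copy into (reg:SI 0) (if one is
   needed) and re-emits the load itself on edge B2->B3, after which the
   original load in B3 is fully redundant and can be deleted.  */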


/* Keep statistics of this pass.  */
static struct
{
  int moves_inserted;
  int copies_inserted;
  int insns_deleted;
} stats;

/* We need to keep a hash table of expressions.  The table entries are of
   type 'struct expr', and for each expression there is a singly-linked
   list of occurrences.  */

/* Expression elements in the hash table.  */
struct expr
{
  /* The expression (SET_SRC for expressions, PATTERN for assignments).  */
  rtx expr;

  /* The hash value for this entry.  */
  hashval_t hash;

  /* Index in the transparent bitmaps.  */
  unsigned int bitmap_index;

  /* List of available occurrences in basic blocks in the function.  */
  struct occr *avail_occr;
};

/* Hashtable helpers.  */

struct expr_hasher : nofree_ptr_hash <expr>
{
  static inline hashval_t hash (const expr *);
  static inline bool equal (const expr *, const expr *);
};


/* Hash expression X.
   DO_NOT_RECORD_P is a boolean indicating if a volatile operand is found
   or if the expression contains something we don't want to insert in the
   table.  */

static hashval_t
hash_expr (rtx x, int *do_not_record_p)
{
  *do_not_record_p = 0;
  return hash_rtx (x, GET_MODE (x), do_not_record_p,
		   NULL,  /*have_reg_qty=*/false);
}

/* Callback for hashtab.
   Return the hash value for expression EXP.  We don't actually hash
   here; we just return the cached hash value.  */

inline hashval_t
expr_hasher::hash (const expr *exp)
{
  return exp->hash;
}

/* Callback for hashtab.
   Return nonzero if exp1 is equivalent to exp2.  */

inline bool
expr_hasher::equal (const expr *exp1, const expr *exp2)
{
  int equiv_p = exp_equiv_p (exp1->expr, exp2->expr, 0, true);

  gcc_assert (!equiv_p || exp1->hash == exp2->hash);
  return equiv_p;
}

/* The table itself.  */
static hash_table<expr_hasher> *expr_table;


static struct obstack expr_obstack;

/* Occurrence of an expression.
   There is at most one occurrence per basic block.  If a pattern appears
   more than once, the last appearance is used.  */

struct occr
{
  /* Next occurrence of this expression.  */
  struct occr *next;
  /* The insn that computes the expression.  */
  rtx_insn *insn;
  /* Nonzero if this [anticipatable] occurrence has been deleted.  */
  char deleted_p;
};

static struct obstack occr_obstack;

/* The following structure holds the information about the occurrences of
   the redundant instructions.  */
struct unoccr
{
  struct unoccr *next;
  edge pred;
  rtx_insn *insn;
};

static struct obstack unoccr_obstack;

/* Array where each element is the CUID of the insn that last set the hard
   register with the number of the element, since the start of the current
   basic block.

   This array is used during the building of the hash table (step 1) to
   determine if a reg is killed before the end of a basic block.

   It is also used when eliminating partial redundancies (step 2) to see
   if a reg was modified since the start of a basic block.  */
static int *reg_avail_info;
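
/* For example (the numbers are hypothetical): if the insn with CUID 7 was
   the last one seen that sets hard register 3, then reg_avail_info[3] == 7,
   and reg_changed_after_insn_p below answers true for register 3 and any
   CUID smaller than 7.  */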

/* A list of insns that may modify memory within the current basic block.  */
struct modifies_mem
{
  rtx_insn *insn;
  struct modifies_mem *next;
};
static struct modifies_mem *modifies_mem_list;

/* The modifies_mem structs also go on an obstack, but this obstack is
   freed each time after completing the analysis or transformations on
   a basic block.  So we allocate a dummy modifies_mem_obstack_bottom
   object on the obstack to keep track of the bottom of the obstack.  */
static struct obstack modifies_mem_obstack;
static struct modifies_mem *modifies_mem_obstack_bottom;
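
/* This is the usual obstack roll-back idiom: obstack_free with a non-null
   second argument frees that object and everything allocated after it, so
   reset_opr_set_tables below can discard the whole per-block list in one
   cheap call.  */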

/* Mapping of insn UIDs to CUIDs.
   CUIDs are like UIDs except they increase monotonically in each basic
   block, have no gaps, and only apply to real insns.  */
static int *uid_cuid;
#define INSN_CUID(INSN) (uid_cuid[INSN_UID (INSN)])

/* Bitmap of blocks which have memory stores.  */
static bitmap modify_mem_list_set;

/* Bitmap of blocks which have calls.  */
static bitmap blocks_with_calls;

/* Vector indexed by block # with a list of all the insns that
   modify memory within the block.  */
static vec<rtx_insn *> *modify_mem_list;

/* Vector indexed by block # with a canonicalized list of insns
   that modify memory in the block.  */
static vec<modify_pair> *canon_modify_mem_list;

/* Vector of simple bitmaps indexed by block number.  Each component sbitmap
   indicates which expressions are transparent through the block.  */
static sbitmap *transp;


/* Helpers for memory allocation/freeing.  */
static void alloc_mem (void);
static void free_mem (void);

/* Support for hash table construction and transformations.  */
static bool oprs_unchanged_p (rtx, rtx_insn *, bool);
static void record_last_reg_set_info (rtx_insn *, rtx);
static void record_last_reg_set_info_regno (rtx_insn *, int);
static void record_last_mem_set_info (rtx_insn *);
static void record_last_set_info (rtx, const_rtx, void *);
static void record_opr_changes (rtx_insn *);

static void find_mem_conflicts (rtx, const_rtx, void *);
static int load_killed_in_block_p (int, rtx, bool);
static void reset_opr_set_tables (void);

/* Hash table support.  */
static hashval_t hash_expr (rtx, int *);
static void insert_expr_in_table (rtx, rtx_insn *);
static struct expr *lookup_expr_in_table (rtx);
static void dump_hash_table (FILE *);

/* Helpers for eliminate_partially_redundant_load.  */
static bool reg_killed_on_edge (rtx, edge);
static bool reg_used_on_edge (rtx, edge);

static rtx get_avail_load_store_reg (rtx_insn *);

static bool bb_has_well_behaved_predecessors (basic_block);
static struct occr *get_bb_avail_insn (basic_block, struct occr *, int);
static void hash_scan_set (rtx_insn *);
static void compute_hash_table (void);

/* The workhorses of this pass.  */
static void eliminate_partially_redundant_load (basic_block,
						rtx_insn *,
						struct expr *);
static void eliminate_partially_redundant_loads (void);


/* Allocate memory for the CUID mapping array and register/memory
   tracking tables.  */

static void
alloc_mem (void)
{
  int i;
  basic_block bb;
  rtx_insn *insn;

  /* Find the largest UID and create a mapping from UIDs to CUIDs.  */
  uid_cuid = XCNEWVEC (int, get_max_uid () + 1);
  i = 1;
  FOR_EACH_BB_FN (bb, cfun)
    FOR_BB_INSNS (bb, insn)
      {
	if (INSN_P (insn))
	  uid_cuid[INSN_UID (insn)] = i++;
	else
	  uid_cuid[INSN_UID (insn)] = i;
      }

  /* Allocate the available expressions hash table.  We don't want to
     make the hash table too small, but unnecessarily making it too large
     also doesn't help.  The i/4 is a gcse.c relic, and seems like a
     reasonable choice.  */
  expr_table = new hash_table<expr_hasher> (MAX (i / 4, 13));

  /* We allocate everything on obstacks because we often can roll back
     the whole obstack to some point.  Freeing obstacks is very fast.  */
  gcc_obstack_init (&expr_obstack);
  gcc_obstack_init (&occr_obstack);
  gcc_obstack_init (&unoccr_obstack);
  gcc_obstack_init (&modifies_mem_obstack);

  /* Working array used to track the last set for each register
     in the current block.  */
  reg_avail_info = (int *) xmalloc (FIRST_PSEUDO_REGISTER * sizeof (int));

  /* Put a dummy modifies_mem object on the modifies_mem_obstack, so we
     can roll it back in reset_opr_set_tables.  */
  modifies_mem_obstack_bottom =
    (struct modifies_mem *) obstack_alloc (&modifies_mem_obstack,
					   sizeof (struct modifies_mem));

  blocks_with_calls = BITMAP_ALLOC (NULL);
  modify_mem_list_set = BITMAP_ALLOC (NULL);

  modify_mem_list = (vec_rtx_heap *) xcalloc (last_basic_block_for_fn (cfun),
					      sizeof (vec_rtx_heap));
  canon_modify_mem_list
    = (vec_modify_pair_heap *) xcalloc (last_basic_block_for_fn (cfun),
					sizeof (vec_modify_pair_heap));
}

/* Free memory allocated by alloc_mem.  */

static void
free_mem (void)
{
  free (uid_cuid);

  delete expr_table;
  expr_table = NULL;

  obstack_free (&expr_obstack, NULL);
  obstack_free (&occr_obstack, NULL);
  obstack_free (&unoccr_obstack, NULL);
  obstack_free (&modifies_mem_obstack, NULL);

  unsigned i;
  bitmap_iterator bi;
  EXECUTE_IF_SET_IN_BITMAP (modify_mem_list_set, 0, i, bi)
    {
      modify_mem_list[i].release ();
      canon_modify_mem_list[i].release ();
    }

  BITMAP_FREE (blocks_with_calls);
  BITMAP_FREE (modify_mem_list_set);
  free (reg_avail_info);
  free (modify_mem_list);
  free (canon_modify_mem_list);
}


/* Insert expression X, which occurs in INSN, into the hash table.
   If it is already present, record INSN as the last occurrence in
   INSN's basic block.  */

static void
insert_expr_in_table (rtx x, rtx_insn *insn)
{
  int do_not_record_p;
  hashval_t hash;
  struct expr *cur_expr, **slot;
  struct occr *avail_occr, *last_occr = NULL;

  hash = hash_expr (x, &do_not_record_p);

  /* Do not insert expression in the table if it contains volatile operands,
     or if hash_expr determines the expression is something we don't want
     to or can't handle.  */
  if (do_not_record_p)
    return;

  /* We anticipate that redundant expressions are rare, so for convenience
     allocate a new hash table element here already and set its fields.
     If we don't do this, we need a hack with a static struct expr.  Anyway,
     obstack_free is really fast and one more obstack_alloc doesn't hurt if
     we're going to see more expressions later on.  */
  cur_expr = (struct expr *) obstack_alloc (&expr_obstack,
					    sizeof (struct expr));
  cur_expr->expr = x;
  cur_expr->hash = hash;
  cur_expr->avail_occr = NULL;

  slot = expr_table->find_slot_with_hash (cur_expr, hash, INSERT);

  if (! (*slot))
    {
      /* The expression isn't found, so insert it.  */
      *slot = cur_expr;

      /* Anytime we add an entry to the table, record the index
	 of the new entry.  The bitmap index starts counting
	 at zero.  */
      cur_expr->bitmap_index = expr_table->elements () - 1;
    }
  else
    {
      /* The expression is already in the table, so roll back the
	 obstack and use the existing table entry.  */
      obstack_free (&expr_obstack, cur_expr);
      cur_expr = *slot;
    }

  /* Search for another occurrence in the same basic block.  Remember the
     tail of the list in case we have to append a new occurrence.  */
  avail_occr = cur_expr->avail_occr;
  while (avail_occr
	 && BLOCK_FOR_INSN (avail_occr->insn) != BLOCK_FOR_INSN (insn))
    {
      last_occr = avail_occr;
      avail_occr = avail_occr->next;
    }

  if (avail_occr)
    /* Found another instance of the expression in the same basic block.
       Prefer this occurrence to the currently recorded one.  We want
       the last one in the block and the block is scanned from start
       to end.  */
    avail_occr->insn = insn;
  else
    {
      /* First occurrence of this expression in this basic block.  */
      avail_occr = (struct occr *) obstack_alloc (&occr_obstack,
						  sizeof (struct occr));

      /* First occurrence of this expression in any block?  */
      if (cur_expr->avail_occr == NULL)
	cur_expr->avail_occr = avail_occr;
      else
	last_occr->next = avail_occr;

      avail_occr->insn = insn;
      avail_occr->next = NULL;
      avail_occr->deleted_p = 0;
    }
}


/* Lookup pattern PAT in the expression hash table.
   The result is a pointer to the table entry, or NULL if not found.  */

static struct expr *
lookup_expr_in_table (rtx pat)
{
  int do_not_record_p;
  struct expr **slot, *tmp_expr;
  hashval_t hash = hash_expr (pat, &do_not_record_p);

  if (do_not_record_p)
    return NULL;

  tmp_expr = (struct expr *) obstack_alloc (&expr_obstack,
					    sizeof (struct expr));
  tmp_expr->expr = pat;
  tmp_expr->hash = hash;
  tmp_expr->avail_occr = NULL;

  /* Use NO_INSERT so that a failed lookup does not leave a dangling
     empty slot behind in the table; find_slot_with_hash then returns
     NULL when PAT is not present.  */
  slot = expr_table->find_slot_with_hash (tmp_expr, hash, NO_INSERT);
  obstack_free (&expr_obstack, tmp_expr);

  if (!slot)
    return NULL;
  else
    return (*slot);
}


/* Dump all expressions and occurrences that are currently in the
   expression hash table to FILE.  */

/* This helper is called via htab_traverse.  */
int
dump_expr_hash_table_entry (expr **slot, FILE *file)
{
  struct expr *exprs = *slot;
  struct occr *occr;

  fprintf (file, "expr: ");
  print_rtl (file, exprs->expr);
  fprintf (file, "\nhashcode: %u\n", exprs->hash);
  fprintf (file, "list of occurrences:\n");
  occr = exprs->avail_occr;
  while (occr)
    {
      rtx_insn *insn = occr->insn;
      print_rtl_single (file, insn);
      fprintf (file, "\n");
      occr = occr->next;
    }
  fprintf (file, "\n");
  return 1;
}

static void
dump_hash_table (FILE *file)
{
  fprintf (file, "\n\nexpression hash table\n");
  fprintf (file, "size %ld, %ld elements, %f collision/search ratio\n",
	   (long) expr_table->size (),
	   (long) expr_table->elements (),
	   expr_table->collisions ());
  if (expr_table->elements () > 0)
    {
      fprintf (file, "\n\ntable entries:\n");
      expr_table->traverse <FILE *, dump_expr_hash_table_entry> (file);
    }
  fprintf (file, "\n");
}

/* Return true if register X is recorded as being set by an instruction
   whose CUID is greater than the one given.  */

static bool
reg_changed_after_insn_p (rtx x, int cuid)
{
  unsigned int regno, end_regno;

  regno = REGNO (x);
  end_regno = END_REGNO (x);
  do
    if (reg_avail_info[regno] > cuid)
      return true;
  while (++regno < end_regno);
  return false;
}
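
/* Note that real insns are given CUIDs starting at 1 (see alloc_mem), so
   calling reg_changed_after_insn_p (x, 0) asks whether X has been set at
   all since the start of the current basic block.  */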

/* Return nonzero if the operands of expression X are unchanged
   1) from the start of INSN's basic block up to but not including INSN
      if AFTER_INSN is false, or
   2) from INSN to the end of INSN's basic block if AFTER_INSN is true.  */

static bool
oprs_unchanged_p (rtx x, rtx_insn *insn, bool after_insn)
{
  int i, j;
  enum rtx_code code;
  const char *fmt;

  if (x == 0)
    return 1;

  code = GET_CODE (x);
  switch (code)
    {
    case REG:
      /* We are called after register allocation.  */
      gcc_assert (REGNO (x) < FIRST_PSEUDO_REGISTER);
      if (after_insn)
	return !reg_changed_after_insn_p (x, INSN_CUID (insn) - 1);
      else
	return !reg_changed_after_insn_p (x, 0);

    case MEM:
      if (load_killed_in_block_p (INSN_CUID (insn), x, after_insn))
	return 0;
      else
	return oprs_unchanged_p (XEXP (x, 0), insn, after_insn);

    case PC:
    case CC0: /*FIXME*/
    case CONST:
    CASE_CONST_ANY:
    case SYMBOL_REF:
    case LABEL_REF:
    case ADDR_VEC:
    case ADDR_DIFF_VEC:
      return 1;

    case PRE_DEC:
    case PRE_INC:
    case POST_DEC:
    case POST_INC:
    case PRE_MODIFY:
    case POST_MODIFY:
      if (after_insn)
	return 0;
      break;

    default:
      break;
    }

  for (i = GET_RTX_LENGTH (code) - 1, fmt = GET_RTX_FORMAT (code); i >= 0; i--)
    {
      if (fmt[i] == 'e')
	{
	  if (! oprs_unchanged_p (XEXP (x, i), insn, after_insn))
	    return 0;
	}
      else if (fmt[i] == 'E')
	for (j = 0; j < XVECLEN (x, i); j++)
	  if (! oprs_unchanged_p (XVECEXP (x, i, j), insn, after_insn))
	    return 0;
    }

  return 1;
}


/* Used for communication between find_mem_conflicts and
   load_killed_in_block_p.  Nonzero if find_mem_conflicts finds a
   conflict between two memory references.
   This is a bit of a hack to work around the limitations of note_stores.  */
static int mems_conflict_p;

/* DEST is the output of an instruction.  If it is a memory reference
   that possibly conflicts with the load found in DATA, then set
   mems_conflict_p to a nonzero value.  */

static void
find_mem_conflicts (rtx dest, const_rtx setter ATTRIBUTE_UNUSED,
		    void *data)
{
  rtx mem_op = (rtx) data;

  while (GET_CODE (dest) == SUBREG
	 || GET_CODE (dest) == ZERO_EXTRACT
	 || GET_CODE (dest) == STRICT_LOW_PART)
    dest = XEXP (dest, 0);

  /* If DEST is not a MEM, then it will not conflict with the load.  Note
     that function calls are assumed to clobber memory, but are handled
     elsewhere.  */
  if (! MEM_P (dest))
    return;

  if (true_dependence (dest, GET_MODE (dest), mem_op))
    mems_conflict_p = 1;
}


/* Return nonzero if the expression in X (a memory reference) is killed
   in the current basic block before (if AFTER_INSN is false) or after
   (if AFTER_INSN is true) the insn with the CUID in UID_LIMIT.

   This function assumes that the modifies_mem table is flushed when
   the hash table construction or redundancy elimination phases start
   processing a new basic block.  */

static int
load_killed_in_block_p (int uid_limit, rtx x, bool after_insn)
{
  struct modifies_mem *list_entry = modifies_mem_list;

  while (list_entry)
    {
      rtx_insn *setter = list_entry->insn;

      /* Ignore entries in the list that do not apply.  */
      if ((after_insn
	   && INSN_CUID (setter) < uid_limit)
	  || (! after_insn
	      && INSN_CUID (setter) > uid_limit))
	{
	  list_entry = list_entry->next;
	  continue;
	}

      /* If SETTER is a call everything is clobbered.  Note that calls
	 to pure functions are never put on the list, so we need not
	 worry about them.  */
      if (CALL_P (setter))
	return 1;

      /* SETTER must be an insn of some kind that sets memory.  Call
	 note_stores to examine each hunk of memory that is modified.
	 It will set mems_conflict_p to nonzero if there may be a
	 conflict between X and SETTER.  */
      mems_conflict_p = 0;
      note_stores (PATTERN (setter), find_mem_conflicts, x);
      if (mems_conflict_p)
	return 1;

      list_entry = list_entry->next;
    }
  return 0;
}


/* Record register first/last/block set information for REG in INSN.  */

static inline void
record_last_reg_set_info (rtx_insn *insn, rtx reg)
{
  unsigned int regno, end_regno;

  regno = REGNO (reg);
  end_regno = END_REGNO (reg);
  do
    reg_avail_info[regno] = INSN_CUID (insn);
  while (++regno < end_regno);
}

static inline void
record_last_reg_set_info_regno (rtx_insn *insn, int regno)
{
  reg_avail_info[regno] = INSN_CUID (insn);
}


/* Record memory modification information for INSN.  We do not actually care
   about the memory location(s) that are set, or even how they are set (consider
   a CALL_INSN).  We merely need to record which insns modify memory.  */

static void
record_last_mem_set_info (rtx_insn *insn)
{
  struct modifies_mem *list_entry;

  list_entry = (struct modifies_mem *) obstack_alloc (&modifies_mem_obstack,
						      sizeof (struct modifies_mem));
  list_entry->insn = insn;
  list_entry->next = modifies_mem_list;
  modifies_mem_list = list_entry;

  record_last_mem_set_info_common (insn, modify_mem_list,
				   canon_modify_mem_list,
				   modify_mem_list_set,
				   blocks_with_calls);
}

/* Called from compute_hash_table via note_stores to handle one
   SET or CLOBBER in an insn.  DATA is really the instruction in which
   the SET is taking place.  */

static void
record_last_set_info (rtx dest, const_rtx setter ATTRIBUTE_UNUSED, void *data)
{
  rtx_insn *last_set_insn = (rtx_insn *) data;

  if (GET_CODE (dest) == SUBREG)
    dest = SUBREG_REG (dest);

  if (REG_P (dest))
    record_last_reg_set_info (last_set_insn, dest);
  else if (MEM_P (dest))
    {
      /* Ignore pushes, they don't clobber memory.  They may still
	 clobber the stack pointer though.  Some targets do argument
	 pushes without adding REG_INC notes.  See e.g. PR25196,
	 where a pushsi2 on i386 doesn't have REG_INC notes.  Note
	 such changes here too.  */
      if (! push_operand (dest, GET_MODE (dest)))
	record_last_mem_set_info (last_set_insn);
      else
	record_last_reg_set_info_regno (last_set_insn, STACK_POINTER_REGNUM);
    }
}


/* Reset tables used to keep track of what's still available since the
   start of the block.  */

static void
reset_opr_set_tables (void)
{
  memset (reg_avail_info, 0, FIRST_PSEUDO_REGISTER * sizeof (int));
  obstack_free (&modifies_mem_obstack, modifies_mem_obstack_bottom);
  modifies_mem_list = NULL;
}


/* Record things set by INSN.
   This data is used by oprs_unchanged_p.  */

static void
record_opr_changes (rtx_insn *insn)
{
  rtx note;

  /* Find all stores and record them.  */
  note_stores (PATTERN (insn), record_last_set_info, insn);

  /* Also record autoincremented REGs for this insn as changed.  */
  for (note = REG_NOTES (insn); note; note = XEXP (note, 1))
    if (REG_NOTE_KIND (note) == REG_INC)
      record_last_reg_set_info (insn, XEXP (note, 0));

  /* Finally, if this is a call, record all call clobbers.  */
  if (CALL_P (insn))
    {
      unsigned int regno;
      rtx link, x;
      hard_reg_set_iterator hrsi;
      EXECUTE_IF_SET_IN_HARD_REG_SET (regs_invalidated_by_call, 0, regno, hrsi)
	record_last_reg_set_info_regno (insn, regno);

      for (link = CALL_INSN_FUNCTION_USAGE (insn); link; link = XEXP (link, 1))
	if (GET_CODE (XEXP (link, 0)) == CLOBBER)
	  {
	    x = XEXP (XEXP (link, 0), 0);
	    if (REG_P (x))
	      {
		gcc_assert (HARD_REGISTER_P (x));
		record_last_reg_set_info (insn, x);
	      }
	  }

      if (! RTL_CONST_OR_PURE_CALL_P (insn))
	record_last_mem_set_info (insn);
    }
}


/* Scan the pattern of INSN and add an entry to the hash table.
   After reload we are interested in loads/stores only.  */

static void
hash_scan_set (rtx_insn *insn)
{
  rtx pat = PATTERN (insn);
  rtx src = SET_SRC (pat);
  rtx dest = SET_DEST (pat);

  /* We are only interested in loads and stores.  */
  if (! MEM_P (src) && ! MEM_P (dest))
    return;

  /* Don't mess with jumps and nops.  */
  if (JUMP_P (insn) || set_noop_p (pat))
    return;

  if (REG_P (dest))
    {
      if (/* Don't CSE something if we can't do a reg/reg copy.  */
	  can_copy_p (GET_MODE (dest))
	  /* Is SET_SRC something we want to gcse?  */
	  && general_operand (src, GET_MODE (src))
#ifdef STACK_REGS
	  /* Never consider insns touching the register stack.  It may
	     create situations that reg-stack cannot handle (e.g. a stack
	     register live across an abnormal edge).  */
	  && (REGNO (dest) < FIRST_STACK_REG || REGNO (dest) > LAST_STACK_REG)
#endif
	  /* An expression is not available if its operands are
	     subsequently modified, including this insn.  */
	  && oprs_unchanged_p (src, insn, true))
	{
	  insert_expr_in_table (src, insn);
	}
    }
  else if (REG_P (src))
    {
      /* A store: record the memory expression, since after the store the
	 stored value is available in SRC.  */
      if (/* Don't CSE something if we can't do a reg/reg copy.  */
	  can_copy_p (GET_MODE (src))
	  /* Is SET_DEST something we want to gcse?  */
	  && general_operand (dest, GET_MODE (dest))
#ifdef STACK_REGS
	  /* As above for STACK_REGS.  */
	  && (REGNO (src) < FIRST_STACK_REG || REGNO (src) > LAST_STACK_REG)
#endif
	  && ! (flag_float_store && FLOAT_MODE_P (GET_MODE (dest)))
	  /* Check if the memory expression is killed after insn.  */
	  && ! load_killed_in_block_p (INSN_CUID (insn) + 1, dest, true)
	  && oprs_unchanged_p (XEXP (dest, 0), insn, true))
	{
	  insert_expr_in_table (dest, insn);
	}
    }
}

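/* As an example (register numbers are hypothetical): an insn
   (set (reg:SI 0) (mem:SI (reg:SI 6))) is entered into the table as the
   expression (mem:SI (reg:SI 6)), provided neither register 6 nor that
   memory is modified in the rest of the block; a store
   (set (mem:SI (reg:SI 6)) (reg:SI 0)) is entered the same way, since
   after it the memory value is available in register 0.  */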

/* Create hash table of memory expressions available at end of basic
   blocks.  Basically you should think of this hash table as the
   representation of AVAIL_OUT.  This is the set of expressions that
   is generated in a basic block and not killed before the end of the
   same basic block.  Notice that this is really a local computation.  */

static void
compute_hash_table (void)
{
  basic_block bb;

  FOR_EACH_BB_FN (bb, cfun)
    {
      rtx_insn *insn;

      /* First pass over the instructions records information used to
	 determine when registers and memory are last set.
	 Since we compute a "local" AVAIL_OUT, reset the tables that
	 help us keep track of what has been modified since the start
	 of the block.  */
      reset_opr_set_tables ();
      FOR_BB_INSNS (bb, insn)
	{
	  if (INSN_P (insn))
	    record_opr_changes (insn);
	}

      /* The next pass actually builds the hash table.  */
      FOR_BB_INSNS (bb, insn)
	if (INSN_P (insn) && GET_CODE (PATTERN (insn)) == SET)
	  hash_scan_set (insn);
    }
}


/* Check if register REG is killed in any insn waiting to be inserted on
   edge E.  This function is required to check that our data flow analysis
   is still valid prior to commit_edge_insertions.  */

static bool
reg_killed_on_edge (rtx reg, edge e)
{
  rtx_insn *insn;

  for (insn = e->insns.r; insn; insn = NEXT_INSN (insn))
    if (INSN_P (insn) && reg_set_p (reg, insn))
      return true;

  return false;
}

/* Similar to above -- check if register REG is used in any insn waiting
   to be inserted on edge E.
   Assumes no such insn can be a CALL_INSN; if so call reg_used_between_p
   with PREV(insn),NEXT(insn) instead of calling reg_overlap_mentioned_p.  */

static bool
reg_used_on_edge (rtx reg, edge e)
{
  rtx_insn *insn;

  for (insn = e->insns.r; insn; insn = NEXT_INSN (insn))
    if (INSN_P (insn) && reg_overlap_mentioned_p (reg, PATTERN (insn)))
      return true;

  return false;
}

/* Return the loaded/stored register of a load/store instruction.  */

static rtx
get_avail_load_store_reg (rtx_insn *insn)
{
  if (REG_P (SET_DEST (PATTERN (insn))))
    /* A load.  */
    return SET_DEST (PATTERN (insn));
  else
    {
      /* A store.  */
      gcc_assert (REG_P (SET_SRC (PATTERN (insn))));
      return SET_SRC (PATTERN (insn));
    }
}

/* Return nonzero if the predecessors of BB are "well behaved".  */

static bool
bb_has_well_behaved_predecessors (basic_block bb)
{
  edge pred;
  edge_iterator ei;

  if (EDGE_COUNT (bb->preds) == 0)
    return false;

  FOR_EACH_EDGE (pred, ei, bb->preds)
    {
      /* commit_one_edge_insertion refuses to insert on abnormal edges even if
	 the source has only one successor so EDGE_CRITICAL_P is too weak.  */
      if ((pred->flags & EDGE_ABNORMAL) && !single_pred_p (pred->dest))
	return false;

      if ((pred->flags & EDGE_ABNORMAL_CALL) && cfun->has_nonlocal_label)
	return false;

      if (tablejump_p (BB_END (pred->src), NULL, NULL))
	return false;
    }
  return true;
}

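/* "Well behaved" means commit_edge_insertions will be able to realize
   every queued insertion: insns cannot be placed on an abnormal edge
   itself (though with a single-predecessor destination they can go at
   the head of the destination block instead), on abnormal call edges
   when the function has nonlocal labels, or on edges leaving a
   tablejump.  */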

/* Search for an occurrence of the expression in BB.  */

static struct occr *
get_bb_avail_insn (basic_block bb, struct occr *orig_occr, int bitmap_index)
{
  struct occr *occr = orig_occr;

  for (; occr != NULL; occr = occr->next)
    if (BLOCK_FOR_INSN (occr->insn) == bb)
      return occr;

  /* If we could not find an occurrence in BB, see if BB
     has a single predecessor with an occurrence that is
     transparent through BB.  */
  if (single_pred_p (bb)
      && bitmap_bit_p (transp[bb->index], bitmap_index)
      && (occr = get_bb_avail_insn (single_pred (bb), orig_occr, bitmap_index)))
    {
      rtx avail_reg = get_avail_load_store_reg (occr->insn);
      if (!reg_set_between_p (avail_reg,
			      PREV_INSN (BB_HEAD (bb)),
			      NEXT_INSN (BB_END (bb)))
	  && !reg_killed_on_edge (avail_reg, single_pred_edge (bb)))
	return occr;
    }

  return NULL;
}
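
/* Because get_bb_avail_insn recurses through single-predecessor chains,
   an occurrence can be found several blocks above BB, as long as the
   expression is transparent through, and the register holding its value
   survives, every intervening block and edge.  */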

/* This helper is called via htab_traverse.  */
int
compute_expr_transp (expr **slot, FILE *dump_file ATTRIBUTE_UNUSED)
{
  struct expr *expr = *slot;

  compute_transp (expr->expr, expr->bitmap_index, transp,
		  blocks_with_calls, modify_mem_list_set,
		  canon_modify_mem_list);
  return 1;
}

/* This handles the case where several stores feed a partially redundant
   load.  It checks if the redundancy elimination is possible and if it's
   worth it.

   Redundancy elimination is possible if:
   1) None of the operands of an insn have been modified since the start
      of the current basic block.
   2) In any predecessor of the current basic block, the same expression
      is generated.

   See the function body for the heuristics that determine if eliminating
   a redundancy is also worth doing, assuming it is possible.  */

static void
eliminate_partially_redundant_load (basic_block bb, rtx_insn *insn,
				    struct expr *expr)
{
  edge pred;
  rtx_insn *avail_insn = NULL;
  rtx avail_reg;
  rtx dest, pat;
  struct occr *a_occr;
  struct unoccr *occr, *avail_occrs = NULL;
  struct unoccr *unoccr, *unavail_occrs = NULL, *rollback_unoccr = NULL;
  int npred_ok = 0;
  gcov_type ok_count = 0; /* Redundant load execution count.  */
  gcov_type critical_count = 0; /* Execution count of critical edges.  */
  edge_iterator ei;
  bool critical_edge_split = false;

  /* The execution count of the loads to be added to make the
     load fully redundant.  */
  gcov_type not_ok_count = 0;
  basic_block pred_bb;

  pat = PATTERN (insn);
  dest = SET_DEST (pat);

  /* Check that the loaded register is not used, set, or killed between the
     beginning of the block and the load.  */
  if (reg_changed_after_insn_p (dest, 0)
      || reg_used_between_p (dest, PREV_INSN (BB_HEAD (bb)), insn))
    return;

  /* Check potential for replacing load with copy for predecessors.  */
  FOR_EACH_EDGE (pred, ei, bb->preds)
    {
      rtx_insn *next_pred_bb_end;

      avail_insn = NULL;
      avail_reg = NULL_RTX;
      pred_bb = pred->src;
      for (a_occr = get_bb_avail_insn (pred_bb,
				       expr->avail_occr,
				       expr->bitmap_index);
	   a_occr;
	   a_occr = get_bb_avail_insn (pred_bb,
				       a_occr->next,
				       expr->bitmap_index))
	{
	  /* Fetch the register holding the available value.  */
	  avail_insn = a_occr->insn;
	  avail_reg = get_avail_load_store_reg (avail_insn);
	  gcc_assert (avail_reg);

	  /* Make sure we can generate a move from register avail_reg to
	     dest.  */
	  rtx_insn *move = gen_move_insn (copy_rtx (dest),
					  copy_rtx (avail_reg));
	  extract_insn (move);
	  if (! constrain_operands (1, get_preferred_alternatives (insn,
								   pred_bb))
	      || reg_killed_on_edge (avail_reg, pred)
	      || reg_used_on_edge (dest, pred))
	    {
	      avail_insn = NULL;
	      continue;
	    }
	  next_pred_bb_end = NEXT_INSN (BB_END (BLOCK_FOR_INSN (avail_insn)));
	  if (!reg_set_between_p (avail_reg, avail_insn, next_pred_bb_end))
	    /* AVAIL_INSN remains non-null.  */
	    break;
	  else
	    avail_insn = NULL;
	}

      if (EDGE_CRITICAL_P (pred))
	critical_count += pred->count;

      if (avail_insn != NULL_RTX)
	{
	  npred_ok++;
	  ok_count += pred->count;
	  if (! set_noop_p (PATTERN (gen_move_insn (copy_rtx (dest),
						    copy_rtx (avail_reg)))))
	    {
	      /* Check if there is going to be a split.  */
	      if (EDGE_CRITICAL_P (pred))
		critical_edge_split = true;
	    }
	  else /* It's a dead move; no need to generate it.  */
	    continue;
	  occr = (struct unoccr *) obstack_alloc (&unoccr_obstack,
						  sizeof (struct unoccr));
	  occr->insn = avail_insn;
	  occr->pred = pred;
	  occr->next = avail_occrs;
	  avail_occrs = occr;
	  if (! rollback_unoccr)
	    rollback_unoccr = occr;
	}
      else
	{
	  /* Adding a load on a critical edge will cause a split.  */
	  if (EDGE_CRITICAL_P (pred))
	    critical_edge_split = true;
	  not_ok_count += pred->count;
	  unoccr = (struct unoccr *) obstack_alloc (&unoccr_obstack,
						    sizeof (struct unoccr));
	  unoccr->insn = NULL;
	  unoccr->pred = pred;
	  unoccr->next = unavail_occrs;
	  unavail_occrs = unoccr;
	  if (! rollback_unoccr)
	    rollback_unoccr = unoccr;
	}
    }

  if (/* No load can be replaced by copy.  */
      npred_ok == 0
      /* Prevent exploding the code.  */
      || (optimize_bb_for_size_p (bb) && npred_ok > 1)
      /* If we don't have profile information we cannot tell if splitting
         a critical edge is profitable or not, so don't do it.  */
      || ((! profile_info || ! flag_branch_probabilities
	   || targetm.cannot_modify_jumps_p ())
	  && critical_edge_split))
    goto cleanup;

  /* Check if it's worth applying the partial redundancy elimination.  */
  if (ok_count < GCSE_AFTER_RELOAD_PARTIAL_FRACTION * not_ok_count)
    goto cleanup;
  if (ok_count < GCSE_AFTER_RELOAD_CRITICAL_FRACTION * critical_count)
    goto cleanup;
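
  /* (With the default parameter values -- 3 and 10 at the time of this
     writing -- the redundant load must run at least three times as often
     as the loads we are about to add, and at least ten times as often as
     the critical edges we would have to split.)  */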

  /* Generate moves to the loaded register from where
     the memory is available.  */
  for (occr = avail_occrs; occr; occr = occr->next)
    {
      avail_insn = occr->insn;
      pred = occr->pred;
      /* Set avail_reg to be the register having the value of the
	 memory.  */
      avail_reg = get_avail_load_store_reg (avail_insn);
      gcc_assert (avail_reg);

      insert_insn_on_edge (gen_move_insn (copy_rtx (dest),
					  copy_rtx (avail_reg)),
			   pred);
      stats.moves_inserted++;

      if (dump_file)
	fprintf (dump_file,
		 "generating move from %d to %d on edge from %d to %d\n",
		 REGNO (avail_reg),
		 REGNO (dest),
		 pred->src->index,
		 pred->dest->index);
    }

  /* Regenerate loads where the memory is unavailable.  */
  for (unoccr = unavail_occrs; unoccr; unoccr = unoccr->next)
    {
      pred = unoccr->pred;
      insert_insn_on_edge (copy_insn (PATTERN (insn)), pred);
      stats.copies_inserted++;

      if (dump_file)
	{
	  fprintf (dump_file,
		   "generating on edge from %d to %d a copy of load: ",
		   pred->src->index,
		   pred->dest->index);
	  print_rtl (dump_file, PATTERN (insn));
	  fprintf (dump_file, "\n");
	}
    }

  /* If the load is not available in this block, delete it now; if it is
     available, mark it for later deletion instead, since it may help
     discover additional redundancies.  */
  for (a_occr = get_bb_avail_insn (bb, expr->avail_occr, expr->bitmap_index);
       a_occr && (a_occr->insn != insn);
       a_occr = get_bb_avail_insn (bb, a_occr->next, expr->bitmap_index))
    ;

  if (!a_occr)
    {
      stats.insns_deleted++;

      if (dump_file)
	{
	  fprintf (dump_file, "deleting insn:\n");
	  print_rtl_single (dump_file, insn);
	  fprintf (dump_file, "\n");
	}
      delete_insn (insn);
    }
  else
    a_occr->deleted_p = 1;

cleanup:
  if (rollback_unoccr)
    obstack_free (&unoccr_obstack, rollback_unoccr);
}

/* Perform the redundancy elimination as described above.  */

static void
eliminate_partially_redundant_loads (void)
{
  rtx_insn *insn;
  basic_block bb;

  /* Note we start at block 1.  */

  if (ENTRY_BLOCK_PTR_FOR_FN (cfun)->next_bb == EXIT_BLOCK_PTR_FOR_FN (cfun))
    return;

  FOR_BB_BETWEEN (bb,
		  ENTRY_BLOCK_PTR_FOR_FN (cfun)->next_bb->next_bb,
		  EXIT_BLOCK_PTR_FOR_FN (cfun),
		  next_bb)
    {
      /* Don't try anything on basic blocks with strange predecessors.  */
      if (! bb_has_well_behaved_predecessors (bb))
	continue;

      /* Do not try anything on cold basic blocks.  */
      if (optimize_bb_for_size_p (bb))
	continue;

      /* Reset the table of things changed since the start of the current
	 basic block.  */
      reset_opr_set_tables ();

      /* Look at all insns in the current basic block and see if there are
	 any loads in it that we can record.  */
      FOR_BB_INSNS (bb, insn)
	{
	  /* Is it a load - of the form (set (reg) (mem))?  */
	  if (NONJUMP_INSN_P (insn)
	      && GET_CODE (PATTERN (insn)) == SET
	      && REG_P (SET_DEST (PATTERN (insn)))
	      && MEM_P (SET_SRC (PATTERN (insn))))
	    {
	      rtx pat = PATTERN (insn);
	      rtx src = SET_SRC (pat);
	      struct expr *expr;

	      if (!MEM_VOLATILE_P (src)
		  && GET_MODE (src) != BLKmode
		  && general_operand (src, GET_MODE (src))
		  /* Are the operands unchanged since the start of the
		     block?  */
		  && oprs_unchanged_p (src, insn, false)
		  && !(cfun->can_throw_non_call_exceptions && may_trap_p (src))
		  && !side_effects_p (src)
		  /* Is the expression recorded?  */
		  && (expr = lookup_expr_in_table (src)) != NULL)
		{
		  /* We now have a load (insn) and an available memory at
		     its BB start (expr).  Try to remove the load if it is
		     redundant.  */
		  eliminate_partially_redundant_load (bb, insn, expr);
		}
	    }

	  /* Keep track of everything modified by this insn, so that we
	     know what has been modified since the start of the current
	     basic block.  */
	  if (INSN_P (insn))
	    record_opr_changes (insn);
	}
    }

  commit_edge_insertions ();
}

/* Go over the expression hash table and delete insns that were
   marked for later deletion.  */

/* This helper is called via htab_traverse.  */
int
delete_redundant_insns_1 (expr **slot, void *data ATTRIBUTE_UNUSED)
{
  struct expr *exprs = *slot;
  struct occr *occr;

  for (occr = exprs->avail_occr; occr != NULL; occr = occr->next)
    {
      if (occr->deleted_p && dbg_cnt (gcse2_delete))
	{
	  delete_insn (occr->insn);
	  stats.insns_deleted++;

	  if (dump_file)
	    {
	      fprintf (dump_file, "deleting insn:\n");
	      print_rtl_single (dump_file, occr->insn);
	      fprintf (dump_file, "\n");
	    }
	}
    }

  return 1;
}

static void
delete_redundant_insns (void)
{
  expr_table->traverse <void *, delete_redundant_insns_1> (NULL);
  if (dump_file)
    fprintf (dump_file, "\n");
}

/* Main entry point of the GCSE-after-reload pass: clean up redundant
   loads created by spilling.  */

static void
gcse_after_reload_main (rtx f ATTRIBUTE_UNUSED)
{
  memset (&stats, 0, sizeof (stats));

  /* Allocate memory for this pass.
     Also computes and initializes the insns' CUIDs.  */
  alloc_mem ();

  /* We need alias analysis.  */
  init_alias_analysis ();

  compute_hash_table ();

  if (dump_file)
    dump_hash_table (dump_file);

  if (expr_table->elements () > 0)
    {
      /* Knowing which MEMs are transparent through a block can significantly
	 increase the number of redundant loads found.  So compute transparency
	 information for each memory expression in the hash table.  */
      df_analyze ();
      /* This cannot be part of the normal allocation routine because
	 we have to know the number of elements in the hash table.  */
      transp = sbitmap_vector_alloc (last_basic_block_for_fn (cfun),
				     expr_table->elements ());
      bitmap_vector_ones (transp, last_basic_block_for_fn (cfun));
      expr_table->traverse <FILE *, compute_expr_transp> (dump_file);
      eliminate_partially_redundant_loads ();
      delete_redundant_insns ();
      sbitmap_vector_free (transp);

      if (dump_file)
	{
	  fprintf (dump_file, "GCSE AFTER RELOAD stats:\n");
	  fprintf (dump_file, "copies inserted: %d\n", stats.copies_inserted);
	  fprintf (dump_file, "moves inserted:  %d\n", stats.moves_inserted);
	  fprintf (dump_file, "insns deleted:   %d\n", stats.insns_deleted);
	  fprintf (dump_file, "\n\n");
	}

      statistics_counter_event (cfun, "copies inserted",
				stats.copies_inserted);
      statistics_counter_event (cfun, "moves inserted",
				stats.moves_inserted);
      statistics_counter_event (cfun, "insns deleted",
				stats.insns_deleted);
    }

  /* We are finished with alias analysis.  */
  end_alias_analysis ();

  free_mem ();
}


static unsigned int
rest_of_handle_gcse2 (void)
{
  gcse_after_reload_main (get_insns ());
  rebuild_jump_labels (get_insns ());
  return 0;
}

namespace {

const pass_data pass_data_gcse2 =
{
  RTL_PASS, /* type */
  "gcse2", /* name */
  OPTGROUP_NONE, /* optinfo_flags */
  TV_GCSE_AFTER_RELOAD, /* tv_id */
  0, /* properties_required */
  0, /* properties_provided */
  0, /* properties_destroyed */
  0, /* todo_flags_start */
  0, /* todo_flags_finish */
};

class pass_gcse2 : public rtl_opt_pass
{
public:
  pass_gcse2 (gcc::context *ctxt)
    : rtl_opt_pass (pass_data_gcse2, ctxt)
  {}

  /* opt_pass methods: */
  virtual bool gate (function *fun)
    {
      return (optimize > 0 && flag_gcse_after_reload
	      && optimize_function_for_speed_p (fun));
    }

  virtual unsigned int execute (function *) { return rest_of_handle_gcse2 (); }

}; // class pass_gcse2

} // anon namespace

rtl_opt_pass *
make_pass_gcse2 (gcc::context *ctxt)
{
  return new pass_gcse2 (ctxt);
}
1459