/* Post reload partially redundant load elimination
   Copyright (C) 2004-2020 Free Software Foundation, Inc.

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify it under
the terms of the GNU General Public License as published by the Free
Software Foundation; either version 3, or (at your option) any later
version.

GCC is distributed in the hope that it will be useful, but WITHOUT ANY
WARRANTY; without even the implied warranty of MERCHANTABILITY or
FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING3.  If not see
<http://www.gnu.org/licenses/>.  */

#include "config.h"
#include "system.h"
#include "coretypes.h"
#include "backend.h"
#include "target.h"
#include "rtl.h"
#include "tree.h"
#include "predict.h"
#include "df.h"
#include "memmodel.h"
#include "tm_p.h"
#include "insn-config.h"
#include "emit-rtl.h"
#include "recog.h"

#include "cfgrtl.h"
#include "profile.h"
#include "expr.h"
#include "tree-pass.h"
#include "dbgcnt.h"
#include "intl.h"
#include "gcse-common.h"
#include "gcse.h"
#include "regs.h"
#include "function-abi.h"

/* The following code implements gcse after reload.  The purpose of this
   pass is to clean up redundant loads generated by reload and by other
   optimizations that come after gcse.  It searches for simple inter-block
   redundancies and tries to eliminate them by adding moves and loads
   in cold places.

   Perform partially redundant load elimination: try to eliminate redundant
   loads created by the reload pass.  We look for fully or partially
   redundant loads fed by one or more loads/stores in predecessor BBs,
   and try adding loads to make them fully redundant.  We also check if
   it's worth adding loads to be able to delete the redundant load.

   Algorithm:
   1. Build available expressions hash table:
       For each load/store instruction, if the loaded/stored memory didn't
       change until the end of the basic block add this memory expression to
       the hash table.
   2. Perform Redundancy elimination:
      For each load instruction do the following:
	 perform partial redundancy elimination, check if it's worth adding
	 loads to make the load fully redundant.  If so add loads and
	 register copies and delete the load.
   3. Delete instructions made redundant in step 2.

   Future enhancement:
     If the loaded register is used/defined between load and some store,
     look for some other free register between load and all its stores,
     and replace the load with a copy from this register to the loaded
     register.
*/
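
/* As an illustrative sketch (simplified pseudo-RTL, not taken from any
   particular target), consider a block B with two predecessors P1 and P2:

     P1:  (set (reg r1) (mem addr))   ; value of addr is live in r1
     P2:  ...                         ; addr not available here
     B:   (set (reg r2) (mem addr))   ; partially redundant load

   The pass inserts (set (reg r2) (reg r1)) on the edge P1->B and a copy
   of the load on the edge P2->B, which makes the load in B fully
   redundant so that it can be deleted.  */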


/* Keep statistics of this pass.  */
static struct
{
  int moves_inserted;
  int copies_inserted;
  int insns_deleted;
} stats;

/* We need to keep a hash table of expressions.  The table entries are of
   type 'struct expr', and for each expression there is a singly linked
   list of occurrences.  */

/* Expression elements in the hash table.  */
struct expr
{
  /* The expression (SET_SRC for expressions, PATTERN for assignments).  */
  rtx expr;

  /* The cached hash value of this expression.  */
  hashval_t hash;

  /* Index in the transparent bitmaps.  */
  unsigned int bitmap_index;

  /* List of available occurrences in basic blocks in the function.  */
  struct occr *avail_occr;
};

/* Hashtable helpers.  */

struct expr_hasher : nofree_ptr_hash <expr>
{
  static inline hashval_t hash (const expr *);
  static inline bool equal (const expr *, const expr *);
};


/* Hash expression X.
   DO_NOT_RECORD_P is a boolean indicating if a volatile operand is found
   or if the expression contains something we don't want to insert in the
   table.  */

static hashval_t
hash_expr (rtx x, int *do_not_record_p)
{
  *do_not_record_p = 0;
  return hash_rtx (x, GET_MODE (x), do_not_record_p,
		   NULL,  /*have_reg_qty=*/false);
}

/* Callback for hashtab.
   Return the hash value for expression EXP.  We don't actually hash
   here, we just return the cached hash value.  */

inline hashval_t
expr_hasher::hash (const expr *exp)
{
  return exp->hash;
}

/* Callback for hashtab.
   Return nonzero if exp1 is equivalent to exp2.  */

inline bool
expr_hasher::equal (const expr *exp1, const expr *exp2)
{
  int equiv_p = exp_equiv_p (exp1->expr, exp2->expr, 0, true);

  gcc_assert (!equiv_p || exp1->hash == exp2->hash);
  return equiv_p;
}

/* The table itself.  */
static hash_table<expr_hasher> *expr_table;


static struct obstack expr_obstack;

/* Occurrence of an expression.
   There is at most one occurrence per basic block.  If a pattern appears
   more than once, the last appearance is used.  */

struct occr
{
  /* Next occurrence of this expression.  */
  struct occr *next;
  /* The insn that computes the expression.  */
  rtx_insn *insn;
  /* Nonzero if this [anticipatable] occurrence has been deleted.  */
  char deleted_p;
};

static struct obstack occr_obstack;

/* The following structure holds the information about the occurrences of
   the redundant instructions.  */
struct unoccr
{
  struct unoccr *next;
  edge pred;
  rtx_insn *insn;
};

static struct obstack unoccr_obstack;

/* Array where each element is the CUID of the insn that last set the hard
   register with the number of the element, since the start of the current
   basic block.

   This array is used during the building of the hash table (step 1) to
   determine if a reg is killed before the end of a basic block.

   It is also used when eliminating partial redundancies (step 2) to see
   if a reg was modified since the start of a basic block.  */
static int *reg_avail_info;

/* A list of insns that may modify memory within the current basic block.  */
struct modifies_mem
{
  rtx_insn *insn;
  struct modifies_mem *next;
};
static struct modifies_mem *modifies_mem_list;

/* The modifies_mem structs also go on an obstack, only this obstack is
   freed each time after completing the analysis or transformations on
   a basic block.  So we allocate a dummy modifies_mem_obstack_bottom
   object on the obstack to keep track of the bottom of the obstack.  */
static struct obstack modifies_mem_obstack;
static struct modifies_mem  *modifies_mem_obstack_bottom;

/* Mapping of insn UIDs to CUIDs.
   CUIDs are like UIDs except they increase monotonically in each basic
   block, have no gaps, and only apply to real insns.  */
static int *uid_cuid;
#define INSN_CUID(INSN) (uid_cuid[INSN_UID (INSN)])
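
/* For example (purely illustrative UID values), if a block contains a NOTE
   followed by real insns with UIDs 23 and 27, alloc_mem below gives the two
   real insns consecutive CUIDs such as 5 and 6, while the NOTE shares the
   CUID of the next real insn (5 here).  */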

/* Bitmap of blocks which have memory stores.  */
static bitmap modify_mem_list_set;

/* Bitmap of blocks which have calls.  */
static bitmap blocks_with_calls;

/* Vector indexed by block # with a list of all the insns that
   modify memory within the block.  */
static vec<rtx_insn *> *modify_mem_list;

/* Vector indexed by block # with a canonicalized list of insns
   that modify memory in the block.  */
static vec<modify_pair> *canon_modify_mem_list;

/* Vector of simple bitmaps indexed by block number.  Each component sbitmap
   indicates which expressions are transparent through the block.  */
static sbitmap *transp;


/* Helpers for memory allocation/freeing.  */
static void alloc_mem (void);
static void free_mem (void);

/* Support for hash table construction and transformations.  */
static bool oprs_unchanged_p (rtx, rtx_insn *, bool);
static void record_last_reg_set_info (rtx_insn *, rtx);
static void record_last_reg_set_info_regno (rtx_insn *, int);
static void record_last_mem_set_info (rtx_insn *);
static void record_last_set_info (rtx, const_rtx, void *);
static void record_opr_changes (rtx_insn *);

static void find_mem_conflicts (rtx, const_rtx, void *);
static int load_killed_in_block_p (int, rtx, bool);
static void reset_opr_set_tables (void);

/* Hash table support.  */
static hashval_t hash_expr (rtx, int *);
static void insert_expr_in_table (rtx, rtx_insn *);
static struct expr *lookup_expr_in_table (rtx);
static void dump_hash_table (FILE *);

/* Helpers for eliminate_partially_redundant_load.  */
static bool reg_killed_on_edge (rtx, edge);
static bool reg_used_on_edge (rtx, edge);

static rtx get_avail_load_store_reg (rtx_insn *);

static bool bb_has_well_behaved_predecessors (basic_block);
static struct occr* get_bb_avail_insn (basic_block, struct occr *, int);
static void hash_scan_set (rtx_insn *);
static void compute_hash_table (void);

/* The work horses of this pass.  */
static void eliminate_partially_redundant_load (basic_block,
						rtx_insn *,
						struct expr *);
static void eliminate_partially_redundant_loads (void);


/* Allocate memory for the CUID mapping array and register/memory
   tracking tables.  */

static void
alloc_mem (void)
{
  int i;
  basic_block bb;
  rtx_insn *insn;

  /* Find the largest UID and create a mapping from UIDs to CUIDs.  */
  uid_cuid = XCNEWVEC (int, get_max_uid () + 1);
  i = 1;
  FOR_EACH_BB_FN (bb, cfun)
    FOR_BB_INSNS (bb, insn)
      {
        if (INSN_P (insn))
	  uid_cuid[INSN_UID (insn)] = i++;
	else
	  uid_cuid[INSN_UID (insn)] = i;
      }

  /* Allocate the available expressions hash table.  We don't want to
     make the hash table too small, but unnecessarily making it too large
     also doesn't help.  The i/4 is a gcse.c relic, and seems like a
     reasonable choice.  */
  expr_table = new hash_table<expr_hasher> (MAX (i / 4, 13));

  /* We allocate everything on obstacks because we often can roll back
     the whole obstack to some point.  Freeing obstacks is very fast.  */
  gcc_obstack_init (&expr_obstack);
  gcc_obstack_init (&occr_obstack);
  gcc_obstack_init (&unoccr_obstack);
  gcc_obstack_init (&modifies_mem_obstack);

  /* Working array used to track the last set for each register
     in the current block.  */
  reg_avail_info = (int *) xmalloc (FIRST_PSEUDO_REGISTER * sizeof (int));

  /* Put a dummy modifies_mem object on the modifies_mem_obstack, so we
     can roll it back in reset_opr_set_tables.  */
  modifies_mem_obstack_bottom =
    (struct modifies_mem *) obstack_alloc (&modifies_mem_obstack,
					   sizeof (struct modifies_mem));

  blocks_with_calls = BITMAP_ALLOC (NULL);
  modify_mem_list_set = BITMAP_ALLOC (NULL);

  modify_mem_list = (vec_rtx_heap *) xcalloc (last_basic_block_for_fn (cfun),
					      sizeof (vec_rtx_heap));
  canon_modify_mem_list
    = (vec_modify_pair_heap *) xcalloc (last_basic_block_for_fn (cfun),
					sizeof (vec_modify_pair_heap));
}

/* Free memory allocated by alloc_mem.  */

static void
free_mem (void)
{
  free (uid_cuid);

  delete expr_table;
  expr_table = NULL;

  obstack_free (&expr_obstack, NULL);
  obstack_free (&occr_obstack, NULL);
  obstack_free (&unoccr_obstack, NULL);
  obstack_free (&modifies_mem_obstack, NULL);

  unsigned i;
  bitmap_iterator bi;
  EXECUTE_IF_SET_IN_BITMAP (modify_mem_list_set, 0, i, bi)
    {
      modify_mem_list[i].release ();
      canon_modify_mem_list[i].release ();
    }

  BITMAP_FREE (blocks_with_calls);
  BITMAP_FREE (modify_mem_list_set);
  free (reg_avail_info);
  free (modify_mem_list);
  free (canon_modify_mem_list);
}


/* Insert expression X in INSN in the hash TABLE.
   If it is already present, record it as the last occurrence in INSN's
   basic block.  */

static void
insert_expr_in_table (rtx x, rtx_insn *insn)
{
  int do_not_record_p;
  hashval_t hash;
  struct expr *cur_expr, **slot;
  struct occr *avail_occr;

  hash = hash_expr (x, &do_not_record_p);

  /* Do not insert expression in the table if it contains volatile operands,
     or if hash_expr determines the expression is something we don't want
     to or can't handle.  */
  if (do_not_record_p)
    return;

  /* We anticipate that redundant expressions are rare, so for convenience
     allocate a new hash table element here already and set its fields.
     If we don't do this, we need a hack with a static struct expr.  Anyway,
     obstack_free is really fast and one more obstack_alloc doesn't hurt if
     we're going to see more expressions later on.  */
  cur_expr = (struct expr *) obstack_alloc (&expr_obstack,
					    sizeof (struct expr));
  cur_expr->expr = x;
  cur_expr->hash = hash;
  cur_expr->avail_occr = NULL;

  slot = expr_table->find_slot_with_hash (cur_expr, hash, INSERT);

  if (! (*slot))
    {
      /* The expression isn't found, so insert it.  */
      *slot = cur_expr;

      /* Anytime we add an entry to the table, record the index
	 of the new entry.  The bitmap index starts counting
	 at zero.  */
      cur_expr->bitmap_index = expr_table->elements () - 1;
    }
  else
    {
      /* The expression is already in the table, so roll back the
	 obstack and use the existing table entry.  */
      obstack_free (&expr_obstack, cur_expr);
      cur_expr = *slot;
    }

  /* Search for another occurrence in the same basic block.  We insert
     insns blockwise from start to end, and new occurrences are added at
     the head of the list, so we only have to check a single element.  */
  avail_occr = cur_expr->avail_occr;
  if (avail_occr
      && BLOCK_FOR_INSN (avail_occr->insn) == BLOCK_FOR_INSN (insn))
    avail_occr->insn = insn;
  else
    {
      /* First occurrence of this expression in this basic block.  */
      avail_occr = (struct occr *) obstack_alloc (&occr_obstack,
						  sizeof (struct occr));
      avail_occr->insn = insn;
      avail_occr->next = cur_expr->avail_occr;
      avail_occr->deleted_p = 0;
      cur_expr->avail_occr = avail_occr;
    }
}


/* Lookup pattern PAT in the expression hash table.
   The result is a pointer to the table entry, or NULL if not found.  */

static struct expr *
lookup_expr_in_table (rtx pat)
{
  int do_not_record_p;
  struct expr **slot, *tmp_expr;
  hashval_t hash = hash_expr (pat, &do_not_record_p);

  if (do_not_record_p)
    return NULL;

  tmp_expr = (struct expr *) obstack_alloc (&expr_obstack,
					    sizeof (struct expr));
  tmp_expr->expr = pat;
  tmp_expr->hash = hash;
  tmp_expr->avail_occr = NULL;

  slot = expr_table->find_slot_with_hash (tmp_expr, hash, INSERT);
  obstack_free (&expr_obstack, tmp_expr);

  if (!slot)
    return NULL;
  else
    return (*slot);
}


/* Dump all expressions and occurrences that are currently in the
   expression hash table to FILE.  */

/* This helper is called via htab_traverse.  */
int
dump_expr_hash_table_entry (expr **slot, FILE *file)
{
  struct expr *exprs = *slot;
  struct occr *occr;

  fprintf (file, "expr: ");
  print_rtl (file, exprs->expr);
  fprintf (file,"\nhashcode: %u\n", exprs->hash);
  fprintf (file,"list of occurrences:\n");
  occr = exprs->avail_occr;
  while (occr)
    {
      rtx_insn *insn = occr->insn;
      print_rtl_single (file, insn);
      fprintf (file, "\n");
      occr = occr->next;
    }
  fprintf (file, "\n");
  return 1;
}

static void
dump_hash_table (FILE *file)
{
  fprintf (file, "\n\nexpression hash table\n");
  fprintf (file, "size %ld, %ld elements, %f collision/search ratio\n",
           (long) expr_table->size (),
           (long) expr_table->elements (),
           expr_table->collisions ());
  if (!expr_table->is_empty ())
    {
      fprintf (file, "\n\ntable entries:\n");
      expr_table->traverse <FILE *, dump_expr_hash_table_entry> (file);
    }
  fprintf (file, "\n");
}

/* Return true if register X is recorded as being set by an instruction
   whose CUID is greater than the one given.  */

static bool
reg_changed_after_insn_p (rtx x, int cuid)
{
  unsigned int regno, end_regno;

  regno = REGNO (x);
  end_regno = END_REGNO (x);
  do
    if (reg_avail_info[regno] > cuid)
      return true;
  while (++regno < end_regno);
  return false;
}

/* Return nonzero if the operands of expression X are unchanged
   1) from the start of INSN's basic block up to but not including INSN
      if AFTER_INSN is false, or
   2) from INSN to the end of INSN's basic block if AFTER_INSN is true.  */

static bool
oprs_unchanged_p (rtx x, rtx_insn *insn, bool after_insn)
{
  int i, j;
  enum rtx_code code;
  const char *fmt;

  if (x == 0)
    return 1;

  code = GET_CODE (x);
  switch (code)
    {
    case REG:
      /* We are called after register allocation.  */
      gcc_assert (REGNO (x) < FIRST_PSEUDO_REGISTER);
      if (after_insn)
	return !reg_changed_after_insn_p (x, INSN_CUID (insn) - 1);
      else
	return !reg_changed_after_insn_p (x, 0);

    case MEM:
      if (load_killed_in_block_p (INSN_CUID (insn), x, after_insn))
	return 0;
      else
	return oprs_unchanged_p (XEXP (x, 0), insn, after_insn);

    case PC:
    case CC0: /*FIXME*/
    case CONST:
    CASE_CONST_ANY:
    case SYMBOL_REF:
    case LABEL_REF:
    case ADDR_VEC:
    case ADDR_DIFF_VEC:
      return 1;

    case PRE_DEC:
    case PRE_INC:
    case POST_DEC:
    case POST_INC:
    case PRE_MODIFY:
    case POST_MODIFY:
      if (after_insn)
	return 0;
      break;

    default:
      break;
    }

  for (i = GET_RTX_LENGTH (code) - 1, fmt = GET_RTX_FORMAT (code); i >= 0; i--)
    {
      if (fmt[i] == 'e')
	{
	  if (! oprs_unchanged_p (XEXP (x, i), insn, after_insn))
	    return 0;
	}
      else if (fmt[i] == 'E')
	for (j = 0; j < XVECLEN (x, i); j++)
	  if (! oprs_unchanged_p (XVECEXP (x, i, j), insn, after_insn))
	    return 0;
    }

  return 1;
}


/* Used for communication between find_mem_conflicts and
   load_killed_in_block_p.  Nonzero if find_mem_conflicts finds a
   conflict between two memory references.
   This is a bit of a hack to work around the limitations of note_stores.  */
static int mems_conflict_p;

/* DEST is the output of an instruction.  If it is a memory reference, and
   possibly conflicts with the load found in DATA, then set mems_conflict_p
   to a nonzero value.  */

static void
find_mem_conflicts (rtx dest, const_rtx setter ATTRIBUTE_UNUSED,
		    void *data)
{
  rtx mem_op = (rtx) data;

  while (GET_CODE (dest) == SUBREG
	 || GET_CODE (dest) == ZERO_EXTRACT
	 || GET_CODE (dest) == STRICT_LOW_PART)
    dest = XEXP (dest, 0);

  /* If DEST is not a MEM, then it will not conflict with the load.  Note
     that function calls are assumed to clobber memory, but are handled
     elsewhere.  */
  if (! MEM_P (dest))
    return;

  if (true_dependence (dest, GET_MODE (dest), mem_op))
    mems_conflict_p = 1;
}


/* Return nonzero if the expression in X (a memory reference) is killed
   in the current basic block before (if AFTER_INSN is false) or after
   (if AFTER_INSN is true) the insn with the CUID in UID_LIMIT.

   This function assumes that the modifies_mem table is flushed when
   the hash table construction or redundancy elimination phases start
   processing a new basic block.  */

static int
load_killed_in_block_p (int uid_limit, rtx x, bool after_insn)
{
  struct modifies_mem *list_entry = modifies_mem_list;

  while (list_entry)
    {
      rtx_insn *setter = list_entry->insn;

      /* Ignore entries in the list that do not apply.  */
      if ((after_insn
	   && INSN_CUID (setter) < uid_limit)
	  || (! after_insn
	      && INSN_CUID (setter) > uid_limit))
	{
	  list_entry = list_entry->next;
	  continue;
	}

      /* If SETTER is a call everything is clobbered.  Note that calls
	 to pure functions are never put on the list, so we need not
	 worry about them.  */
      if (CALL_P (setter))
	return 1;

      /* SETTER must be an insn of some kind that sets memory.  Call
	 note_stores to examine each hunk of memory that is modified.
	 It will set mems_conflict_p to nonzero if there may be a
	 conflict between X and SETTER.  */
      mems_conflict_p = 0;
      note_stores (setter, find_mem_conflicts, x);
      if (mems_conflict_p)
	return 1;

      list_entry = list_entry->next;
    }
  return 0;
}


/* Record INSN as the last setter of every hard register covered by REG.  */

static inline void
record_last_reg_set_info (rtx_insn *insn, rtx reg)
{
  unsigned int regno, end_regno;

  regno = REGNO (reg);
  end_regno = END_REGNO (reg);
  do
    reg_avail_info[regno] = INSN_CUID (insn);
  while (++regno < end_regno);
}

static inline void
record_last_reg_set_info_regno (rtx_insn *insn, int regno)
{
  reg_avail_info[regno] = INSN_CUID (insn);
}


/* Record memory modification information for INSN.  We do not actually care
   about the memory location(s) that are set, or even how they are set (consider
   a CALL_INSN).  We merely need to record which insns modify memory.  */

static void
record_last_mem_set_info (rtx_insn *insn)
{
  struct modifies_mem *list_entry;

  list_entry = (struct modifies_mem *) obstack_alloc (&modifies_mem_obstack,
						      sizeof (struct modifies_mem));
  list_entry->insn = insn;
  list_entry->next = modifies_mem_list;
  modifies_mem_list = list_entry;

  record_last_mem_set_info_common (insn, modify_mem_list,
				   canon_modify_mem_list,
				   modify_mem_list_set,
				   blocks_with_calls);
}

/* Called from compute_hash_table via note_stores to handle one
   SET or CLOBBER in an insn.  DATA is really the instruction in which
   the SET is taking place.  */

static void
record_last_set_info (rtx dest, const_rtx setter ATTRIBUTE_UNUSED, void *data)
{
  rtx_insn *last_set_insn = (rtx_insn *) data;

  if (GET_CODE (dest) == SUBREG)
    dest = SUBREG_REG (dest);

  if (REG_P (dest))
    record_last_reg_set_info (last_set_insn, dest);
  else if (MEM_P (dest))
    {
      /* Ignore pushes, they don't clobber memory.  They may still
	 clobber the stack pointer though.  Some targets do argument
	 pushes without adding REG_INC notes.  See e.g. PR25196,
	 where a pushsi2 on i386 doesn't have REG_INC notes.  Note
	 such changes here too.  */
      if (! push_operand (dest, GET_MODE (dest)))
	record_last_mem_set_info (last_set_insn);
      else
	record_last_reg_set_info_regno (last_set_insn, STACK_POINTER_REGNUM);
    }
}


/* Reset tables used to keep track of what's still available since the
   start of the block.  */

static void
reset_opr_set_tables (void)
{
  memset (reg_avail_info, 0, FIRST_PSEUDO_REGISTER * sizeof (int));
  obstack_free (&modifies_mem_obstack, modifies_mem_obstack_bottom);
  modifies_mem_list = NULL;
}


/* Record things set by INSN.
   This data is used by oprs_unchanged_p.  */

static void
record_opr_changes (rtx_insn *insn)
{
  rtx note;

  /* Find all stores and record them.  */
  note_stores (insn, record_last_set_info, insn);

  /* Also record autoincremented REGs for this insn as changed.  */
  for (note = REG_NOTES (insn); note; note = XEXP (note, 1))
    if (REG_NOTE_KIND (note) == REG_INC)
      record_last_reg_set_info (insn, XEXP (note, 0));

  /* Finally, if this is a call, record all call clobbers.  */
  if (CALL_P (insn))
    {
      unsigned int regno;
      hard_reg_set_iterator hrsi;
      /* We don't track modes of hard registers, so we need to be
	 conservative and assume that partial kills are full kills.  */
      HARD_REG_SET callee_clobbers
	= insn_callee_abi (insn).full_and_partial_reg_clobbers ();
      EXECUTE_IF_SET_IN_HARD_REG_SET (callee_clobbers, 0, regno, hrsi)
	record_last_reg_set_info_regno (insn, regno);

      if (! RTL_CONST_OR_PURE_CALL_P (insn))
	record_last_mem_set_info (insn);
    }
}


/* Scan the pattern of INSN and add an entry to the hash TABLE.
   After reload we are interested in loads/stores only.  */

static void
hash_scan_set (rtx_insn *insn)
{
  rtx pat = PATTERN (insn);
  rtx src = SET_SRC (pat);
  rtx dest = SET_DEST (pat);

  /* We are only interested in loads and stores.  */
  if (! MEM_P (src) && ! MEM_P (dest))
    return;

  /* Don't mess with jumps and nops.  */
  if (JUMP_P (insn) || set_noop_p (pat))
    return;

  if (REG_P (dest))
    {
      if (/* Don't CSE something if we can't do a reg/reg copy.  */
	  can_copy_p (GET_MODE (dest))
	  /* Is SET_SRC something we want to gcse?  */
	  && general_operand (src, GET_MODE (src))
#ifdef STACK_REGS
	  /* Never consider insns touching the register stack.  It may
	     create situations that reg-stack cannot handle (e.g. a stack
	     register live across an abnormal edge).  */
	  && (REGNO (dest) < FIRST_STACK_REG || REGNO (dest) > LAST_STACK_REG)
#endif
	  /* An expression is not available if its operands are
	     subsequently modified, including this insn.  */
	  && oprs_unchanged_p (src, insn, true))
	{
	  insert_expr_in_table (src, insn);
	}
    }
  else if (REG_P (src))
    {
      /* Record stores as well: after the store, the source register holds
	 the value of the stored memory location, so a later load from that
	 location may be replaceable by a copy from this register.  */
      if (/* Don't CSE something if we can't do a reg/reg copy.  */
	  can_copy_p (GET_MODE (src))
	  /* Is SET_DEST something we want to gcse?  */
	  && general_operand (dest, GET_MODE (dest))
#ifdef STACK_REGS
	  /* As above for STACK_REGS.  */
	  && (REGNO (src) < FIRST_STACK_REG || REGNO (src) > LAST_STACK_REG)
#endif
	  && ! (flag_float_store && FLOAT_MODE_P (GET_MODE (dest)))
	  /* Check if the memory expression is killed after insn.  */
	  && ! load_killed_in_block_p (INSN_CUID (insn) + 1, dest, true)
	  && oprs_unchanged_p (XEXP (dest, 0), insn, true))
	{
	  insert_expr_in_table (dest, insn);
	}
    }
}


/* Create hash table of memory expressions available at end of basic
   blocks.  Basically you should think of this hash table as the
   representation of AVAIL_OUT.  This is the set of expressions that
   is generated in a basic block and not killed before the end of the
   same basic block.  Notice that this is really a local computation.  */

static void
compute_hash_table (void)
{
  basic_block bb;

  FOR_EACH_BB_FN (bb, cfun)
    {
      rtx_insn *insn;

      /* First pass over the instructions records information used to
	 determine when registers and memory are last set.
	 Since we compute a "local" AVAIL_OUT, reset the tables that
	 help us keep track of what has been modified since the start
	 of the block.  */
      reset_opr_set_tables ();
      FOR_BB_INSNS (bb, insn)
	{
	  if (INSN_P (insn))
            record_opr_changes (insn);
	}

      /* The next pass actually builds the hash table.  */
      FOR_BB_INSNS (bb, insn)
	if (INSN_P (insn) && GET_CODE (PATTERN (insn)) == SET)
	  hash_scan_set (insn);
    }
}


/* Check if register REG is killed in any insn waiting to be inserted on
   edge E.  This function is required to check that our data flow analysis
   is still valid prior to commit_edge_insertions.  */

static bool
reg_killed_on_edge (rtx reg, edge e)
{
  rtx_insn *insn;

  for (insn = e->insns.r; insn; insn = NEXT_INSN (insn))
    if (INSN_P (insn) && reg_set_p (reg, insn))
      return true;

  return false;
}

/* Similar to above - check if register REG is used in any insn waiting
   to be inserted on edge E.
   Assumes no such insn can be a CALL_INSN; if so call reg_used_between_p
   with PREV(insn),NEXT(insn) instead of calling reg_overlap_mentioned_p.  */

static bool
reg_used_on_edge (rtx reg, edge e)
{
  rtx_insn *insn;

  for (insn = e->insns.r; insn; insn = NEXT_INSN (insn))
    if (INSN_P (insn) && reg_overlap_mentioned_p (reg, PATTERN (insn)))
      return true;

  return false;
}

/* Return the loaded/stored register of a load/store instruction.  */

static rtx
get_avail_load_store_reg (rtx_insn *insn)
{
  if (REG_P (SET_DEST (PATTERN (insn))))
    /* A load.  */
    return SET_DEST (PATTERN (insn));
  else
    {
      /* A store.  */
      gcc_assert (REG_P (SET_SRC (PATTERN (insn))));
      return SET_SRC (PATTERN (insn));
    }
}

/* Return nonzero if the predecessors of BB are "well behaved".  */

static bool
bb_has_well_behaved_predecessors (basic_block bb)
{
  edge pred;
  edge_iterator ei;

  if (EDGE_COUNT (bb->preds) == 0)
    return false;

  FOR_EACH_EDGE (pred, ei, bb->preds)
    {
      /* commit_one_edge_insertion refuses to insert on abnormal edges even if
	 the source has only one successor so EDGE_CRITICAL_P is too weak.  */
      if ((pred->flags & EDGE_ABNORMAL) && !single_pred_p (pred->dest))
	return false;

      if ((pred->flags & EDGE_ABNORMAL_CALL) && cfun->has_nonlocal_label)
	return false;

      if (tablejump_p (BB_END (pred->src), NULL, NULL))
	return false;
    }
  return true;
}


/* Search for the occurrences of expression in BB.  */

static struct occr*
get_bb_avail_insn (basic_block bb, struct occr *orig_occr, int bitmap_index)
{
  struct occr *occr = orig_occr;

  for (; occr != NULL; occr = occr->next)
    if (BLOCK_FOR_INSN (occr->insn) == bb)
      return occr;

  /* If we could not find an occurrence in BB, see if BB
     has a single predecessor with an occurrence that is
     transparent through BB.  */
  if (transp
      && single_pred_p (bb)
      && bitmap_bit_p (transp[bb->index], bitmap_index)
      && (occr = get_bb_avail_insn (single_pred (bb), orig_occr, bitmap_index)))
    {
      rtx avail_reg = get_avail_load_store_reg (occr->insn);
      if (!reg_set_between_p (avail_reg,
			      PREV_INSN (BB_HEAD (bb)),
			      NEXT_INSN (BB_END (bb)))
	  && !reg_killed_on_edge (avail_reg, single_pred_edge (bb)))
	return occr;
    }

  return NULL;
}


/* This helper is called via htab_traverse.  */
int
compute_expr_transp (expr **slot, FILE *dump_file ATTRIBUTE_UNUSED)
{
  struct expr *expr = *slot;

  compute_transp (expr->expr, expr->bitmap_index, transp,
		  blocks_with_calls, modify_mem_list_set,
		  canon_modify_mem_list);
  return 1;
}

/* This handles the case where several stores feed a partially redundant
   load.  It checks if the redundancy elimination is possible and if it's
   worth it.

   Redundancy elimination is possible if:
   1) None of the operands of an insn have been modified since the start
      of the current basic block.
   2) In any predecessor of the current basic block, the same expression
      is generated.

   See the function body for the heuristics that determine if eliminating
   a redundancy is also worth doing, assuming it is possible.  */
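
/* Roughly, the heuristics below require that at least one predecessor can
   supply the value through a simple register copy, that we do not insert
   more than one copy when the block is being optimized for size, and that
   the profile-weighted execution count of the predecessors where the value
   is already available sufficiently outweighs both the count of the
   predecessors that would need a newly inserted load and the count flowing
   over critical edges that would have to be split (see the
   param_gcse_after_reload_* parameters).  */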

static void
eliminate_partially_redundant_load (basic_block bb, rtx_insn *insn,
				    struct expr *expr)
{
  edge pred;
  rtx_insn *avail_insn = NULL;
  rtx avail_reg;
  rtx dest, pat;
  struct occr *a_occr;
  struct unoccr *occr, *avail_occrs = NULL;
  struct unoccr *unoccr, *unavail_occrs = NULL, *rollback_unoccr = NULL;
  int npred_ok = 0;
  profile_count ok_count = profile_count::zero ();
		 /* Redundant load execution count.  */
  profile_count critical_count = profile_count::zero ();
		 /* Execution count of critical edges.  */
  edge_iterator ei;
  bool critical_edge_split = false;

  /* The execution count of the loads to be added to make the
     load fully redundant.  */
  profile_count not_ok_count = profile_count::zero ();
  basic_block pred_bb;

  pat = PATTERN (insn);
  dest = SET_DEST (pat);

  /* Check that the loaded register is not used, set, or killed from the
     beginning of the block.  */
  if (reg_changed_after_insn_p (dest, 0)
      || reg_used_between_p (dest, PREV_INSN (BB_HEAD (bb)), insn))
    return;

  /* Check potential for replacing load with copy for predecessors.  */
  FOR_EACH_EDGE (pred, ei, bb->preds)
    {
      rtx_insn *next_pred_bb_end;

      avail_insn = NULL;
      avail_reg = NULL_RTX;
      pred_bb = pred->src;
      for (a_occr = get_bb_avail_insn (pred_bb,
				       expr->avail_occr,
				       expr->bitmap_index);
	   a_occr;
	   a_occr = get_bb_avail_insn (pred_bb,
				       a_occr->next,
				       expr->bitmap_index))
	{
	  /* Check if the loaded register is not used.  */
	  avail_insn = a_occr->insn;
	  avail_reg = get_avail_load_store_reg (avail_insn);
	  gcc_assert (avail_reg);

	  /* Make sure we can generate a move from register avail_reg to
	     dest.  */
	  rtx_insn *move = gen_move_insn (copy_rtx (dest),
					  copy_rtx (avail_reg));
	  extract_insn (move);
	  if (! constrain_operands (1, get_preferred_alternatives (insn,
								   pred_bb))
	      || reg_killed_on_edge (avail_reg, pred)
	      || reg_used_on_edge (dest, pred))
	    {
	      avail_insn = NULL;
	      continue;
	    }
	  next_pred_bb_end = NEXT_INSN (BB_END (BLOCK_FOR_INSN (avail_insn)));
	  if (!reg_set_between_p (avail_reg, avail_insn, next_pred_bb_end))
	    /* AVAIL_INSN remains non-null.  */
	    break;
	  else
	    avail_insn = NULL;
	}

      if (EDGE_CRITICAL_P (pred) && pred->count ().initialized_p ())
	critical_count += pred->count ();

      if (avail_insn != NULL_RTX)
	{
	  npred_ok++;
	  if (pred->count ().initialized_p ())
	    ok_count = ok_count + pred->count ();
	  if (! set_noop_p (PATTERN (gen_move_insn (copy_rtx (dest),
						    copy_rtx (avail_reg)))))
	    {
	      /* Check if there is going to be a split.  */
	      if (EDGE_CRITICAL_P (pred))
		critical_edge_split = true;
	    }
	  else /* It's a dead move, no need to generate it.  */
	    continue;
	  occr = (struct unoccr *) obstack_alloc (&unoccr_obstack,
						  sizeof (struct unoccr));
	  occr->insn = avail_insn;
	  occr->pred = pred;
	  occr->next = avail_occrs;
	  avail_occrs = occr;
	  if (! rollback_unoccr)
	    rollback_unoccr = occr;
	}
      else
	{
	  /* Adding a load on a critical edge will cause a split.  */
	  if (EDGE_CRITICAL_P (pred))
	    critical_edge_split = true;
	  if (pred->count ().initialized_p ())
	    not_ok_count = not_ok_count + pred->count ();
	  unoccr = (struct unoccr *) obstack_alloc (&unoccr_obstack,
						    sizeof (struct unoccr));
	  unoccr->insn = NULL;
	  unoccr->pred = pred;
	  unoccr->next = unavail_occrs;
	  unavail_occrs = unoccr;
	  if (! rollback_unoccr)
	    rollback_unoccr = unoccr;
	}
    }

  if (/* No load can be replaced by copy.  */
      npred_ok == 0
      /* Prevent exploding the code.  */
      || (optimize_bb_for_size_p (bb) && npred_ok > 1)
      /* If we don't have profile information we cannot tell if splitting
         a critical edge is profitable or not so don't do it.  */
      || ((!profile_info || profile_status_for_fn (cfun) != PROFILE_READ
	   || targetm.cannot_modify_jumps_p ())
	  && critical_edge_split))
    goto cleanup;

  /* Check if it's worth applying the partial redundancy elimination.  */
  if (ok_count.to_gcov_type ()
      < param_gcse_after_reload_partial_fraction * not_ok_count.to_gcov_type ())
    goto cleanup;

  gcov_type threshold;
#if (GCC_VERSION >= 5000)
  if (__builtin_mul_overflow (param_gcse_after_reload_critical_fraction,
			      critical_count.to_gcov_type (), &threshold))
    threshold = profile_count::max_count;
#else
  threshold
    = (param_gcse_after_reload_critical_fraction
       * critical_count.to_gcov_type ());
#endif

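  /* Likewise require the execution count of the predecessors where the
     value is already available to outweigh the count flowing over critical
     edges, scaled by the critical-fraction parameter, since inserting on
     those edges forces them to be split.  */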
  if (ok_count.to_gcov_type () < threshold)
    goto cleanup;

  /* Generate moves to the loaded register from where
     the memory is available.  */
  for (occr = avail_occrs; occr; occr = occr->next)
    {
      avail_insn = occr->insn;
      pred = occr->pred;
      /* Set avail_reg to be the register having the value of the
	 memory.  */
      avail_reg = get_avail_load_store_reg (avail_insn);
      gcc_assert (avail_reg);

      insert_insn_on_edge (gen_move_insn (copy_rtx (dest),
					  copy_rtx (avail_reg)),
			   pred);
      stats.moves_inserted++;

      if (dump_file)
	fprintf (dump_file,
		 "generating move from %d to %d on edge from %d to %d\n",
		 REGNO (avail_reg),
		 REGNO (dest),
		 pred->src->index,
		 pred->dest->index);
    }

  /* Regenerate loads where the memory is unavailable.  */
  for (unoccr = unavail_occrs; unoccr; unoccr = unoccr->next)
    {
      pred = unoccr->pred;
      insert_insn_on_edge (copy_insn (PATTERN (insn)), pred);
      stats.copies_inserted++;

      if (dump_file)
	{
	  fprintf (dump_file,
		   "generating on edge from %d to %d a copy of load: ",
		   pred->src->index,
		   pred->dest->index);
	  print_rtl (dump_file, PATTERN (insn));
	  fprintf (dump_file, "\n");
	}
    }

  /* Delete the insn if it is not available in this block and mark it
     for deletion if it is available. If insn is available it may help
     discover additional redundancies, so mark it for later deletion.  */
  for (a_occr = get_bb_avail_insn (bb, expr->avail_occr, expr->bitmap_index);
       a_occr && (a_occr->insn != insn);
       a_occr = get_bb_avail_insn (bb, a_occr->next, expr->bitmap_index))
    ;

  if (!a_occr)
    {
      stats.insns_deleted++;

      if (dump_file)
	{
	  fprintf (dump_file, "deleting insn:\n");
          print_rtl_single (dump_file, insn);
          fprintf (dump_file, "\n");
	}
      delete_insn (insn);
    }
  else
    a_occr->deleted_p = 1;

cleanup:
  if (rollback_unoccr)
    obstack_free (&unoccr_obstack, rollback_unoccr);
}

/* Perform the redundancy elimination as described above.  */

static void
eliminate_partially_redundant_loads (void)
{
  rtx_insn *insn;
  basic_block bb;

  /* Note we start at block 1.  */

  if (ENTRY_BLOCK_PTR_FOR_FN (cfun)->next_bb == EXIT_BLOCK_PTR_FOR_FN (cfun))
    return;

  FOR_BB_BETWEEN (bb,
		  ENTRY_BLOCK_PTR_FOR_FN (cfun)->next_bb->next_bb,
		  EXIT_BLOCK_PTR_FOR_FN (cfun),
		  next_bb)
    {
      /* Don't try anything on basic blocks with strange predecessors.  */
      if (! bb_has_well_behaved_predecessors (bb))
	continue;

      /* Do not try anything on cold basic blocks.  */
      if (optimize_bb_for_size_p (bb))
	continue;

      /* Reset the table of things changed since the start of the current
	 basic block.  */
      reset_opr_set_tables ();

      /* Look at all insns in the current basic block and see if there are
	 any loads in it that we can record.  */
      FOR_BB_INSNS (bb, insn)
	{
	  /* Is it a load - of the form (set (reg) (mem))?  */
	  if (NONJUMP_INSN_P (insn)
              && GET_CODE (PATTERN (insn)) == SET
	      && REG_P (SET_DEST (PATTERN (insn)))
	      && MEM_P (SET_SRC (PATTERN (insn))))
	    {
	      rtx pat = PATTERN (insn);
	      rtx src = SET_SRC (pat);
	      struct expr *expr;

	      if (!MEM_VOLATILE_P (src)
		  && GET_MODE (src) != BLKmode
		  && general_operand (src, GET_MODE (src))
		  /* Are the operands unchanged since the start of the
		     block?  */
		  && oprs_unchanged_p (src, insn, false)
		  && !(cfun->can_throw_non_call_exceptions && may_trap_p (src))
		  && !side_effects_p (src)
		  /* Is the expression recorded?  */
		  && (expr = lookup_expr_in_table (src)) != NULL)
		{
		  /* We now have a load (insn) and an available memory at
		     its BB start (expr).  Try to remove the load if it is
		     redundant.  */
		  eliminate_partially_redundant_load (bb, insn, expr);
		}
	    }

	  /* Keep track of everything modified by this insn, so that we
	     know what has been modified since the start of the current
	     basic block.  */
	  if (INSN_P (insn))
	    record_opr_changes (insn);
	}
    }

  commit_edge_insertions ();
}

/* Go over the expression hash table and delete insns that were
   marked for later deletion.  */

/* This helper is called via htab_traverse.  */
int
delete_redundant_insns_1 (expr **slot, void *data ATTRIBUTE_UNUSED)
{
  struct expr *exprs = *slot;
  struct occr *occr;

  for (occr = exprs->avail_occr; occr != NULL; occr = occr->next)
    {
      if (occr->deleted_p && dbg_cnt (gcse2_delete))
	{
	  delete_insn (occr->insn);
	  stats.insns_deleted++;

	  if (dump_file)
	    {
	      fprintf (dump_file, "deleting insn:\n");
	      print_rtl_single (dump_file, occr->insn);
	      fprintf (dump_file, "\n");
	    }
	}
    }

  return 1;
}

static void
delete_redundant_insns (void)
{
  expr_table->traverse <void *, delete_redundant_insns_1> (NULL);
  if (dump_file)
    fprintf (dump_file, "\n");
}

/* Main entry point of the GCSE-after-reload pass - clean up some redundant
   loads caused by spilling.  */

static void
gcse_after_reload_main (rtx f ATTRIBUTE_UNUSED)
{
  /* Disable computing transparency if it is too expensive.  */
  bool do_transp
    = !gcse_or_cprop_is_too_expensive (_("using simple load CSE after register "
					 "allocation"));

  memset (&stats, 0, sizeof (stats));

  /* Allocate memory for this pass.
     Also computes and initializes the insns' CUIDs.  */
  alloc_mem ();

  /* We need alias analysis.  */
  init_alias_analysis ();

  compute_hash_table ();

  if (dump_file)
    dump_hash_table (dump_file);

  if (!expr_table->is_empty ())
    {
      /* Knowing which MEMs are transparent through a block can significantly
	 increase the number of redundant loads found.  So compute transparency
	 information for each memory expression in the hash table.  */
      df_analyze ();
      if (do_transp)
	{
	  /* This cannot be part of the normal allocation routine because
	     we have to know the number of elements in the hash table.  */
	  transp = sbitmap_vector_alloc (last_basic_block_for_fn (cfun),
					 expr_table->elements ());
	  bitmap_vector_ones (transp, last_basic_block_for_fn (cfun));
	  expr_table->traverse <FILE *, compute_expr_transp> (dump_file);
	}
      else
	transp = NULL;
      eliminate_partially_redundant_loads ();
      delete_redundant_insns ();
      if (do_transp)
	sbitmap_vector_free (transp);

      if (dump_file)
	{
	  fprintf (dump_file, "GCSE AFTER RELOAD stats:\n");
	  fprintf (dump_file, "copies inserted: %d\n", stats.copies_inserted);
	  fprintf (dump_file, "moves inserted:  %d\n", stats.moves_inserted);
	  fprintf (dump_file, "insns deleted:   %d\n", stats.insns_deleted);
	  fprintf (dump_file, "\n\n");
	}

      statistics_counter_event (cfun, "copies inserted",
				stats.copies_inserted);
      statistics_counter_event (cfun, "moves inserted",
				stats.moves_inserted);
      statistics_counter_event (cfun, "insns deleted",
				stats.insns_deleted);
    }

  /* We are finished with alias.  */
  end_alias_analysis ();

  free_mem ();
}



static unsigned int
rest_of_handle_gcse2 (void)
{
  gcse_after_reload_main (get_insns ());
  rebuild_jump_labels (get_insns ());
  return 0;
}

namespace {

const pass_data pass_data_gcse2 =
{
  RTL_PASS, /* type */
  "gcse2", /* name */
  OPTGROUP_NONE, /* optinfo_flags */
  TV_GCSE_AFTER_RELOAD, /* tv_id */
  0, /* properties_required */
  0, /* properties_provided */
  0, /* properties_destroyed */
  0, /* todo_flags_start */
  0, /* todo_flags_finish */
};

class pass_gcse2 : public rtl_opt_pass
{
public:
  pass_gcse2 (gcc::context *ctxt)
    : rtl_opt_pass (pass_data_gcse2, ctxt)
  {}

  /* opt_pass methods: */
  virtual bool gate (function *fun)
    {
      return (optimize > 0 && flag_gcse_after_reload
	      && optimize_function_for_speed_p (fun));
    }

  virtual unsigned int execute (function *) { return rest_of_handle_gcse2 (); }

}; // class pass_gcse2

} // anon namespace

rtl_opt_pass *
make_pass_gcse2 (gcc::context *ctxt)
{
  return new pass_gcse2 (ctxt);
}