/* RTL-level loop invariant motion.
   Copyright (C) 2004, 2005 Free Software Foundation, Inc.

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify it
under the terms of the GNU General Public License as published by the
Free Software Foundation; either version 2, or (at your option) any
later version.

GCC is distributed in the hope that it will be useful, but WITHOUT
ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or
FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING.  If not, write to the Free
Software Foundation, 51 Franklin Street, Fifth Floor, Boston, MA
02110-1301, USA.  */

/* This implements the loop invariant motion pass.  It is very simple
   (it does not handle calls, libcalls, etc.).  This should be sufficient to
   clean up things like address arithmetic -- other, more complicated
   invariants should be eliminated at the tree level, either in
   tree-ssa-loop-im.c or in tree-ssa-pre.c.

   We proceed loop by loop -- it is simpler than trying to handle things
   globally and should not lose much.  First we inspect all sets inside the
   loop and create a dependency graph on insns (saying "to move this insn,
   you must also move the following insns").

   We then need to determine what to move.  We estimate the number of
   registers used and move as many invariants as possible while we still
   have enough free registers.  We prefer the more expensive invariants.

   Then we move the selected invariants out of the loop, creating new
   temporaries for them if necessary.  */

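/* For example, in RTL corresponding to a source loop like

     for (i = 0; i < n; i++)
       a[i] = b[i] + (x << 2);

   the computation of x << 2 does not change between iterations; the pass
   computes it into a new pseudo register in the loop preheader and replaces
   its uses inside the loop with that register.  (Illustrative example only;
   the pass itself works purely on the RTL sets found in the loop body.)  */
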
#include "config.h"
#include "system.h"
#include "coretypes.h"
#include "tm.h"
#include "rtl.h"
#include "tm_p.h"
#include "hard-reg-set.h"
#include "obstack.h"
#include "basic-block.h"
#include "cfgloop.h"
#include "expr.h"
#include "output.h"
#include "function.h"
#include "flags.h"
#include "df.h"

/* The data stored for the loop.  */

struct loop_data
{
  struct loop *outermost_exit;	/* The outermost exit of the loop.  */
  bool has_call;		/* True if the loop contains a call.  */
};

#define LOOP_DATA(LOOP) ((struct loop_data *) (LOOP)->aux)

/* The description of a use.  */

struct use
{
  rtx *pos;			/* Position of the use.  */
  rtx insn;			/* The insn in which the use occurs.  */

  struct use *next;		/* Next use in the list.  */
};

/* The description of a def.  */

struct def
{
  struct use *uses;		/* The list of uses that are uniquely reached
				   by it.  */
  unsigned n_uses;		/* Number of such uses.  */
  unsigned invno;		/* The corresponding invariant.  */
};

/* The data stored for each invariant.  */

struct invariant
{
  /* The number of the invariant.  */
  unsigned invno;

  /* Whether we already processed the invariant.  */
  bool processed;

  /* The definition of the invariant.  */
  struct def *def;

  /* The insn in which it is defined.  */
  rtx insn;

  /* Whether it is always executed.  */
  bool always_executed;

  /* Whether to move the invariant.  */
  bool move;

  /* Cost of the invariant.  */
  unsigned cost;

  /* The invariants it depends on.  */
  bitmap depends_on;

  /* Used for detecting already visited invariants while determining the
     cost of movements.  */
  unsigned stamp;
};

/* The actual stamp used for marking already visited invariants while
   determining the cost of movements.  */

static unsigned actual_stamp;

typedef struct invariant *invariant_p;

DEF_VEC_P(invariant_p);
DEF_VEC_ALLOC_P(invariant_p, heap);

/* The invariants.  */

static VEC(invariant_p,heap) *invariants;

/* Tests whether X may possibly be invariant, i.e. whether it contains
   nothing that would prevent moving it out of the loop.  */

static bool
check_maybe_invariant (rtx x)
{
  enum rtx_code code = GET_CODE (x);
  int i, j;
  const char *fmt;

  switch (code)
    {
    case CONST_INT:
    case CONST_DOUBLE:
    case SYMBOL_REF:
    case CONST:
    case LABEL_REF:
      return true;

    case PC:
    case CC0:
    case UNSPEC_VOLATILE:
    case CALL:
      return false;

    case REG:
      return true;

    case MEM:
      /* Load/store motion is done elsewhere.  ??? Perhaps also add it here?
	 It should not be hard, and might be faster than "elsewhere".  */

      /* Just handle the most trivial case where we load from an unchanging
	 location (most importantly, pic tables).  */
      if (MEM_READONLY_P (x))
	break;

      return false;

    case ASM_OPERANDS:
      /* Don't mess with insns declared volatile.  */
      if (MEM_VOLATILE_P (x))
	return false;
      break;

    default:
      break;
    }

  fmt = GET_RTX_FORMAT (code);
  for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
    {
      if (fmt[i] == 'e')
	{
	  if (!check_maybe_invariant (XEXP (x, i)))
	    return false;
	}
      else if (fmt[i] == 'E')
	{
	  for (j = 0; j < XVECLEN (x, i); j++)
	    if (!check_maybe_invariant (XVECEXP (x, i, j)))
	      return false;
	}
    }

  return true;
}

/* Determines the basic blocks inside LOOP that are always executed and
   stores their bitmap in ALWAYS_REACHED.  MAY_EXIT is a bitmap of the
   basic blocks that may either exit the loop or contain a call that does
   not have to return.  BODY is the body of the loop obtained by
   get_loop_body_in_dom_order.  */

static void
compute_always_reached (struct loop *loop, basic_block *body,
			bitmap may_exit, bitmap always_reached)
{
  unsigned i;

  for (i = 0; i < loop->num_nodes; i++)
    {
      if (dominated_by_p (CDI_DOMINATORS, loop->latch, body[i]))
	bitmap_set_bit (always_reached, i);

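      /* BODY is in dominance order; once we meet a block through which the
	 loop may be exited (or that contains a call that need not return),
	 conservatively stop marking further blocks as always reached.  */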
      if (bitmap_bit_p (may_exit, i))
	return;
    }
}

/* Finds exits out of the LOOP with body BODY.  Marks the blocks from which
   we may exit the loop by a cfg edge in both HAS_EXIT and MAY_EXIT.  In
   MAY_EXIT, additionally mark the blocks from which we may exit due to
   a call.  */

static void
find_exits (struct loop *loop, basic_block *body,
	    bitmap may_exit, bitmap has_exit)
{
  unsigned i;
  edge_iterator ei;
  edge e;
  struct loop *outermost_exit = loop, *aexit;
  bool has_call = false;
  rtx insn;

  for (i = 0; i < loop->num_nodes; i++)
    {
      if (body[i]->loop_father == loop)
	{
	  FOR_BB_INSNS (body[i], insn)
	    {
	      if (CALL_P (insn)
		  && !CONST_OR_PURE_CALL_P (insn))
		{
		  has_call = true;
		  bitmap_set_bit (may_exit, i);
		  break;
		}
	    }

	  FOR_EACH_EDGE (e, ei, body[i]->succs)
	    {
	      if (flow_bb_inside_loop_p (loop, e->dest))
		continue;

	      bitmap_set_bit (may_exit, i);
	      bitmap_set_bit (has_exit, i);
	      outermost_exit = find_common_loop (outermost_exit,
						 e->dest->loop_father);
	    }
	  continue;
	}

      /* Use the data stored for the subloop to decide whether we may exit
	 through it.  It is sufficient to do this for the header of the
	 subloop, as the other basic blocks inside it must be dominated by
	 it.  */
      if (body[i]->loop_father->header != body[i])
	continue;

      if (LOOP_DATA (body[i]->loop_father)->has_call)
	{
	  has_call = true;
	  bitmap_set_bit (may_exit, i);
	}
      aexit = LOOP_DATA (body[i]->loop_father)->outermost_exit;
      if (aexit != loop)
	{
	  bitmap_set_bit (may_exit, i);
	  bitmap_set_bit (has_exit, i);

	  if (flow_loop_nested_p (aexit, outermost_exit))
	    outermost_exit = aexit;
	}
    }

  loop->aux = xcalloc (1, sizeof (struct loop_data));
  LOOP_DATA (loop)->outermost_exit = outermost_exit;
  LOOP_DATA (loop)->has_call = has_call;
}

/* Check whether we may assign a value to X from a register.  */

static bool
may_assign_reg_p (rtx x)
{
  return (can_copy_p (GET_MODE (x))
	  && (!REG_P (x)
	      || !HARD_REGISTER_P (x)
	      || REGNO_REG_CLASS (REGNO (x)) != NO_REGS));
}

/* Finds definitions that may correspond to invariants in LOOP with body BODY.
   DF is the dataflow object.  */

static void
find_defs (struct loop *loop, basic_block *body, struct df *df)
{
  unsigned i;
  bitmap blocks = BITMAP_ALLOC (NULL);

  for (i = 0; i < loop->num_nodes; i++)
    bitmap_set_bit (blocks, body[i]->index);

  df_analyze_subcfg (df, blocks, DF_UD_CHAIN | DF_HARD_REGS | DF_EQUIV_NOTES);
  BITMAP_FREE (blocks);
}

/* Creates a new invariant for definition DEF in INSN, depending on invariants
   in DEPENDS_ON.  ALWAYS_EXECUTED is true if the insn is always executed,
   unless the program ends due to a function call.  */

static void
create_new_invariant (struct def *def, rtx insn, bitmap depends_on,
		      bool always_executed)
{
  struct invariant *inv = xmalloc (sizeof (struct invariant));
  rtx set = single_set (insn);

  inv->def = def;
  inv->always_executed = always_executed;
  inv->depends_on = depends_on;

  /* If the set is simple, usually by moving it we move the whole store out of
     the loop.  Otherwise we save only the cost of the computation.  */
  if (def)
    inv->cost = rtx_cost (set, SET);
  else
    inv->cost = rtx_cost (SET_SRC (set), SET);

  inv->move = false;
  inv->processed = false;
  inv->stamp = 0;
  inv->insn = insn;

  inv->invno = VEC_length (invariant_p, invariants);
  if (def)
    def->invno = inv->invno;
  VEC_safe_push (invariant_p, heap, invariants, inv);

  if (dump_file)
    {
      fprintf (dump_file,
	       "Set in insn %d is invariant (%d), cost %d, depends on ",
	       INSN_UID (insn), inv->invno, inv->cost);
      dump_bitmap (dump_file, inv->depends_on);
    }
}

/* Record USE at DEF.  */

static void
record_use (struct def *def, rtx *use, rtx insn)
{
  struct use *u = xmalloc (sizeof (struct use));

  if (GET_CODE (*use) == SUBREG)
    use = &SUBREG_REG (*use);
  gcc_assert (REG_P (*use));

  u->pos = use;
  u->insn = insn;
  u->next = def->uses;
  def->uses = u;
  def->n_uses++;
}

/* Finds the invariants INSN depends on and stores them in the DEPENDS_ON
   bitmap.  Returns false if some dependency prevents INSN from being
   invariant -- e.g. a used register has several reaching definitions, or
   its definition is not a recorded invariant that dominates INSN.  DF is
   the dataflow object.  */

static bool
check_dependencies (rtx insn, struct df *df, bitmap depends_on)
{
  struct df_link *uses, *defs;
  struct ref *use, *def;
  basic_block bb = BLOCK_FOR_INSN (insn), def_bb;
  struct def *def_data;

  for (uses = DF_INSN_USES (df, insn); uses; uses = uses->next)
    {
      use = uses->ref;

      defs = DF_REF_CHAIN (use);
      if (!defs)
	continue;

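      /* A use with more than one reaching definition cannot be attributed
	 to a single invariant def, so give up on INSN.  */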
      if (defs->next)
	return false;

      def = defs->ref;
      def_data = DF_REF_DATA (def);
      if (!def_data)
	return false;

      def_bb = DF_REF_BB (def);
      if (!dominated_by_p (CDI_DOMINATORS, bb, def_bb))
	return false;

      bitmap_set_bit (depends_on, def_data->invno);
    }

  return true;
}

/* Finds an invariant in INSN.  ALWAYS_REACHED is true if the insn is always
   executed.  ALWAYS_EXECUTED is true if the insn is always executed,
   unless the program ends due to a function call.  DF is the dataflow
   object.  */

static void
find_invariant_insn (rtx insn, bool always_reached, bool always_executed,
		     struct df *df)
{
  struct ref *ref;
  struct def *def;
  bitmap depends_on;
  rtx set, dest;
  bool simple = true;

  /* Until we get rid of LIBCALLS.  */
  if (find_reg_note (insn, REG_RETVAL, NULL_RTX)
      || find_reg_note (insn, REG_LIBCALL, NULL_RTX)
      || find_reg_note (insn, REG_NO_CONFLICT, NULL_RTX))
    return;

  set = single_set (insn);
  if (!set)
    return;
  dest = SET_DEST (set);

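  /* If the destination is not a pseudo register, the set cannot get an
     invariant definition of its own: DEF stays NULL, no uses are recorded
     for it, and only the cost of computing the source is counted.  */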
  if (!REG_P (dest)
      || HARD_REGISTER_P (dest))
    simple = false;

  if (!may_assign_reg_p (SET_DEST (set))
      || !check_maybe_invariant (SET_SRC (set)))
    return;

  if (may_trap_p (PATTERN (insn)))
    {
      if (!always_reached)
	return;

      /* Unless the exceptions are handled, the behavior is undefined
	 if the trap occurs.  */
      if (flag_non_call_exceptions)
	return;
    }

  depends_on = BITMAP_ALLOC (NULL);
  if (!check_dependencies (insn, df, depends_on))
    {
      BITMAP_FREE (depends_on);
      return;
    }

  if (simple)
    {
      ref = df_find_def (df, insn, dest);
      def = xcalloc (1, sizeof (struct def));
      DF_REF_DATA (ref) = def;
    }
  else
    def = NULL;

  create_new_invariant (def, insn, depends_on, always_executed);
}

/* Record registers used in INSN that have a unique invariant definition.
   DF is the dataflow object.  */

static void
record_uses (rtx insn, struct df *df)
{
  struct df_link *uses, *defs;
  struct ref *use, *def;
  basic_block bb = BLOCK_FOR_INSN (insn), def_bb;

  for (uses = DF_INSN_USES (df, insn); uses; uses = uses->next)
    {
      use = uses->ref;

      defs = DF_REF_CHAIN (use);
      if (!defs || defs->next)
	continue;
      def = defs->ref;
      if (!DF_REF_DATA (def))
	continue;

      def_bb = DF_REF_BB (def);
      if (!dominated_by_p (CDI_DOMINATORS, bb, def_bb))
	continue;

      record_use (DF_REF_DATA (def), DF_REF_LOC (use), DF_REF_INSN (use));
    }
}

/* Finds invariants in INSN.  ALWAYS_REACHED is true if the insn is always
   executed.  ALWAYS_EXECUTED is true if the insn is always executed,
   unless the program ends due to a function call.  DF is the dataflow
   object.  */

static void
find_invariants_insn (rtx insn, bool always_reached, bool always_executed,
		      struct df *df)
{
  find_invariant_insn (insn, always_reached, always_executed, df);
  record_uses (insn, df);
}

/* Finds invariants in basic block BB.  ALWAYS_REACHED is true if the
   basic block is always executed.  ALWAYS_EXECUTED is true if the basic
   block is always executed, unless the program ends due to a function
   call.  DF is the dataflow object.  */

static void
find_invariants_bb (basic_block bb, bool always_reached, bool always_executed,
		    struct df *df)
{
  rtx insn;

  FOR_BB_INSNS (bb, insn)
    {
      if (!INSN_P (insn))
	continue;

      find_invariants_insn (insn, always_reached, always_executed, df);

      if (always_reached
	  && CALL_P (insn)
	  && !CONST_OR_PURE_CALL_P (insn))
	always_reached = false;
    }
}

/* Finds invariants in LOOP with body BODY.  ALWAYS_REACHED is the bitmap of
   basic blocks in BODY that are always executed.  ALWAYS_EXECUTED is the
   bitmap of basic blocks in BODY that are always executed unless the program
   ends due to a function call.  DF is the dataflow object.  */

static void
find_invariants_body (struct loop *loop, basic_block *body,
		      bitmap always_reached, bitmap always_executed,
		      struct df *df)
{
  unsigned i;

  for (i = 0; i < loop->num_nodes; i++)
    find_invariants_bb (body[i],
			bitmap_bit_p (always_reached, i),
			bitmap_bit_p (always_executed, i),
			df);
}

/* Finds invariants in LOOP.  DF is the dataflow object.  */

static void
find_invariants (struct loop *loop, struct df *df)
{
  bitmap may_exit = BITMAP_ALLOC (NULL);
  bitmap always_reached = BITMAP_ALLOC (NULL);
  bitmap has_exit = BITMAP_ALLOC (NULL);
  bitmap always_executed = BITMAP_ALLOC (NULL);
  basic_block *body = get_loop_body_in_dom_order (loop);

  find_exits (loop, body, may_exit, has_exit);
  compute_always_reached (loop, body, may_exit, always_reached);
  compute_always_reached (loop, body, has_exit, always_executed);

  find_defs (loop, body, df);
  find_invariants_body (loop, body, always_reached, always_executed, df);

  BITMAP_FREE (always_reached);
  BITMAP_FREE (always_executed);
  BITMAP_FREE (may_exit);
  BITMAP_FREE (has_exit);
  free (body);
}

/* Frees a list of uses USE.  */

static void
free_use_list (struct use *use)
{
  struct use *next;

  for (; use; use = next)
    {
      next = use->next;
      free (use);
    }
}

/* Calculates the cost and the number of registers needed for moving invariant
   INV out of the loop and stores them to *COMP_COST and *REGS_NEEDED.  */

static void
get_inv_cost (struct invariant *inv, int *comp_cost, unsigned *regs_needed)
{
  int acomp_cost;
  unsigned aregs_needed;
  unsigned depno;
  struct invariant *dep;
  bitmap_iterator bi;

  *comp_cost = 0;
  *regs_needed = 0;
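  /* Invariants already selected for moving, or already accounted for in
     this cost query (same stamp), contribute nothing more.  */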
  if (inv->move
      || inv->stamp == actual_stamp)
    return;
  inv->stamp = actual_stamp;

  (*regs_needed)++;
  (*comp_cost) += inv->cost;

  EXECUTE_IF_SET_IN_BITMAP (inv->depends_on, 0, depno, bi)
    {
      dep = VEC_index (invariant_p, invariants, depno);

      get_inv_cost (dep, &acomp_cost, &aregs_needed);

      if (aregs_needed
	  /* We need to check always_executed, since if the original value of
	     the invariant may be preserved, we may need to keep it in a
	     separate register.  TODO check whether the register has a
	     use outside of the loop.  */
	  && dep->always_executed
	  && !dep->def->uses->next)
	{
	  /* If this is a single use, after moving the dependency we will not
	     need a new register.  */
	  aregs_needed--;
	}

      (*regs_needed) += aregs_needed;
      (*comp_cost) += acomp_cost;
    }
}

/* Calculates the gain from moving invariant INV out of the loop.  REGS_USED
   is the number of registers used in the loop, N_INV_USES is the number of
   uses of invariants, NEW_REGS is the number of new registers already added
   due to invariant motion.  The number of registers needed for INV is stored
   in *REGS_NEEDED.  */

static int
gain_for_invariant (struct invariant *inv, unsigned *regs_needed,
		    unsigned new_regs, unsigned regs_used, unsigned n_inv_uses)
{
  int comp_cost, size_cost;

  get_inv_cost (inv, &comp_cost, regs_needed);
  actual_stamp++;

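  /* Estimate the extra cost (as computed by global_cost_for_size) of needing
     *REGS_NEEDED more registers on top of the NEW_REGS already added; the
     gain is the saved computation cost minus this size cost.  */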
  size_cost = (global_cost_for_size (new_regs + *regs_needed,
				     regs_used, n_inv_uses)
	       - global_cost_for_size (new_regs, regs_used, n_inv_uses));

  return comp_cost - size_cost;
}

/* Finds the invariant with the best gain for moving.  Returns the gain,
   stores the invariant in *BEST and the number of registers needed for it
   in *REGS_NEEDED.  REGS_USED is the number of registers used in the loop,
   N_INV_USES is the number of uses of invariants.  NEW_REGS is the number
   of new registers already added due to invariant motion.  */

static int
best_gain_for_invariant (struct invariant **best, unsigned *regs_needed,
			 unsigned new_regs, unsigned regs_used,
			 unsigned n_inv_uses)
{
  struct invariant *inv;
  int gain = 0, again;
  unsigned aregs_needed, invno;

  for (invno = 0; VEC_iterate (invariant_p, invariants, invno, inv); invno++)
    {
      if (inv->move)
	continue;

      again = gain_for_invariant (inv, &aregs_needed,
				  new_regs, regs_used, n_inv_uses);
      if (again > gain)
	{
	  gain = again;
	  *best = inv;
	  *regs_needed = aregs_needed;
	}
    }

  return gain;
}

/* Marks invariant INVNO and all its dependencies for moving.  */

static void
set_move_mark (unsigned invno)
{
  struct invariant *inv = VEC_index (invariant_p, invariants, invno);
  bitmap_iterator bi;

  if (inv->move)
    return;
  inv->move = true;

  if (dump_file)
    fprintf (dump_file, "Decided to move invariant %d\n", invno);

  EXECUTE_IF_SET_IN_BITMAP (inv->depends_on, 0, invno, bi)
    {
      set_move_mark (invno);
    }
}

/* Determines which invariants to move.  DF is the dataflow object.  */

static void
find_invariants_to_move (struct df *df)
{
  unsigned i, regs_used, n_inv_uses, regs_needed = 0, new_regs;
  struct invariant *inv = NULL;

  if (!VEC_length (invariant_p, invariants))
    return;

  /* Now something slightly more involved.  First estimate the number of used
     registers.  */
  n_inv_uses = 0;

  /* We do not really do a good job in this estimation; put some initial bound
     here to stand for induction variables etc. that we do not detect.  */
  regs_used = 2;

  for (i = 0; i < df->n_regs; i++)
    {
      if (!DF_REGNO_FIRST_DEF (df, i) && DF_REGNO_LAST_USE (df, i))
	{
	  /* This is a value that is used but not changed inside the loop.  */
	  regs_used++;
	}
    }

  for (i = 0; VEC_iterate (invariant_p, invariants, i, inv); i++)
    {
      if (inv->def)
	n_inv_uses += inv->def->n_uses;
    }

  new_regs = 0;
  while (best_gain_for_invariant (&inv, &regs_needed,
				  new_regs, regs_used, n_inv_uses) > 0)
    {
      set_move_mark (inv->invno);
      new_regs += regs_needed;
    }
}

/* Move invariant INVNO out of the LOOP.  DF is the dataflow object.  */

static void
move_invariant_reg (struct loop *loop, unsigned invno, struct df *df)
{
  struct invariant *inv = VEC_index (invariant_p, invariants, invno);
  unsigned i;
  basic_block preheader = loop_preheader_edge (loop)->src;
  rtx reg, set;
  struct use *use;
  bitmap_iterator bi;

  if (inv->processed)
    return;
  inv->processed = true;

  if (inv->depends_on)
    {
      EXECUTE_IF_SET_IN_BITMAP (inv->depends_on, 0, i, bi)
	{
	  move_invariant_reg (loop, i, df);
	}
    }

  /* Move the set out of the loop.  If the set is always executed (we could
     omit this condition if we know that the register is unused outside of the
     loop, but it does not seem worth finding out) and it has no uses that
     would not be dominated by it, we may just move it (TODO).  Otherwise we
     need to create a temporary register.  */
  set = single_set (inv->insn);
  reg = gen_reg_rtx (GET_MODE (SET_DEST (set)));
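  /* First emit a copy from the new register into the original destination
     right after the original insn, so that uses of the old destination that
     are not rewritten below still see the correct value.  */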
  df_pattern_emit_after (df, gen_move_insn (SET_DEST (set), reg),
			 BLOCK_FOR_INSN (inv->insn), inv->insn);

  /* If the SET_DEST of the invariant insn is a reg, we can just move
     the insn out of the loop.  Otherwise, we have to use gen_move_insn
     to let emit_move_insn produce a valid instruction stream.  */
  if (REG_P (SET_DEST (set)))
    {
      SET_DEST (set) = reg;
      reorder_insns (inv->insn, inv->insn, BB_END (preheader));
      df_insn_modify (df, preheader, inv->insn);
    }
  else
    {
      df_pattern_emit_after (df, gen_move_insn (reg, SET_SRC (set)),
			     preheader, BB_END (preheader));
      df_insn_delete (df, BLOCK_FOR_INSN (inv->insn), inv->insn);
    }

  /* Replace the uses we know to be dominated.  It saves work for copy
     propagation, and it is also necessary so that dependent invariants
     are computed correctly.  */
  if (inv->def)
    {
      for (use = inv->def->uses; use; use = use->next)
	{
	  *use->pos = reg;
	  df_insn_modify (df, BLOCK_FOR_INSN (use->insn), use->insn);
	}
    }
}

/* Move the selected invariants out of the LOOP.  DF is the dataflow
   object.  */

static void
move_invariants (struct loop *loop, struct df *df)
{
  struct invariant *inv;
  unsigned i;

  for (i = 0; VEC_iterate (invariant_p, invariants, i, inv); i++)
    {
      if (inv->move)
	move_invariant_reg (loop, i, df);
    }
}

/* Initializes invariant motion data.  */

static void
init_inv_motion_data (void)
{
  actual_stamp = 1;

  invariants = VEC_alloc (invariant_p, heap, 100);
}

/* Frees the data allocated by invariant motion.  DF is the dataflow
   object.  */

static void
free_inv_motion_data (struct df *df)
{
  unsigned i;
  struct def *def;
  struct invariant *inv;

  for (i = 0; i < df->n_defs; i++)
    {
      if (!df->defs[i])
	continue;

      def = DF_REF_DATA (df->defs[i]);
      if (!def)
	continue;

      free_use_list (def->uses);
      free (def);
      DF_REF_DATA (df->defs[i]) = NULL;
    }

  for (i = 0; VEC_iterate (invariant_p, invariants, i, inv); i++)
    {
      BITMAP_FREE (inv->depends_on);
      free (inv);
    }
  VEC_free (invariant_p, heap, invariants);
}

/* Move the invariants out of the LOOP.  DF is the dataflow object.  */

static void
move_single_loop_invariants (struct loop *loop, struct df *df)
{
  init_inv_motion_data ();

  find_invariants (loop, df);
  find_invariants_to_move (df);
  move_invariants (loop, df);

  free_inv_motion_data (df);
}

/* Releases the auxiliary data for LOOP.  */

static void
free_loop_data (struct loop *loop)
{
  struct loop_data *data = LOOP_DATA (loop);

  free (data);
  loop->aux = NULL;
}

/* Move the invariants out of the LOOPS.  */

void
move_loop_invariants (struct loops *loops)
{
  struct loop *loop;
  unsigned i;
  struct df *df = df_init ();

  /* Process the loops, innermost first.  */
  loop = loops->tree_root;
  while (loop->inner)
    loop = loop->inner;

  while (loop != loops->tree_root)
    {
      move_single_loop_invariants (loop, df);

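      /* Advance to the next loop in innermost-first order: descend to the
	 innermost child of the next sibling if there is one, otherwise go
	 up to the parent.  */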
      if (loop->next)
	{
	  loop = loop->next;
	  while (loop->inner)
	    loop = loop->inner;
	}
      else
	loop = loop->outer;
    }

  for (i = 1; i < loops->num; i++)
    if (loops->parray[i])
      free_loop_data (loops->parray[i]);

  df_finish (df);

#ifdef ENABLE_CHECKING
  verify_flow_info ();
#endif
}