1/* Dataflow support routines.
2   Copyright (C) 1999, 2000, 2001, 2002, 2003, 2004, 2005
3   Free Software Foundation, Inc.
4   Contributed by Michael P. Hayes (m.hayes@elec.canterbury.ac.nz,
5                                    mhayes@redhat.com)
6
7This file is part of GCC.
8
9GCC is free software; you can redistribute it and/or modify it under
10the terms of the GNU General Public License as published by the Free
11Software Foundation; either version 2, or (at your option) any later
12version.
13
14GCC is distributed in the hope that it will be useful, but WITHOUT ANY
15WARRANTY; without even the implied warranty of MERCHANTABILITY or
16FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
17for more details.
18
19You should have received a copy of the GNU General Public License
20along with GCC; see the file COPYING.  If not, write to the Free
21Software Foundation, 51 Franklin Street, Fifth Floor, Boston, MA
2202110-1301, USA.
23
24
25OVERVIEW:
26
27This file provides some dataflow routines for computing reaching defs,
28upward exposed uses, live variables, def-use chains, and use-def
29chains.  The global dataflow is performed using simple iterative
30methods with a worklist and could be sped up by ordering the blocks
31with a depth first search order.
32
33A `struct ref' data structure (ref) is allocated for every register
34reference (def or use) and this records the insn and bb the ref is
35found within.  The refs are linked together in chains of uses and defs
36for each insn and for each register.  Each ref also has a chain field
37that links all the use refs for a def or all the def refs for a use.
38This is used to create use-def or def-use chains.
39
40
41USAGE:
42
43Here's an example of using the dataflow routines.
44
45      struct df *df;
46
47      df = df_init ();
48
49      df_analyze (df, 0, DF_ALL);
50
51      df_dump (df, DF_ALL, stderr);
52
53      df_finish (df);
54
55
56df_init simply creates a poor man's object (df) that needs to be
57passed to all the dataflow routines.  df_finish destroys this
58object and frees up any allocated memory.   DF_ALL says to analyze
59everything.
60
61df_analyze performs the following:
62
631. Records defs and uses by scanning the insns in each basic block
64   or by scanning the insns queued by df_insn_modify.
652. Links defs and uses into insn-def and insn-use chains.
663. Links defs and uses into reg-def and reg-use chains.
674. Assigns LUIDs to each insn (for modified blocks).
685. Calculates local reaching definitions.
696. Calculates global reaching definitions.
707. Creates use-def chains.
718. Calculates local reaching uses (upwards exposed uses).
729. Calculates global reaching uses.
7310. Creates def-use chains.
7411. Calculates local live registers.
7512. Calculates global live registers.
7613. Calculates register lifetimes and determines local registers.
77
78
79PHILOSOPHY:
80
81Note that the dataflow information is not updated for every newly
82deleted or created insn.  If the dataflow information requires
83updating then all the changed, new, or deleted insns needs to be
84marked with df_insn_modify (or df_insns_modify) either directly or
85indirectly (say through calling df_insn_delete).  df_insn_modify
86marks all the modified insns to get processed the next time df_analyze
87 is called.
88
89Beware that tinkering with insns may invalidate the dataflow information.
90The philosophy behind these routines is that once the dataflow
91information has been gathered, the user should store what they require
92before they tinker with any insn.  Once a reg is replaced, for example,
93then the reg-def/reg-use chains will point to the wrong place.  Once a
94whole lot of changes have been made, df_analyze can be called again
95to update the dataflow information.  Currently, this is not very smart
96with regard to propagating changes to the dataflow so it should not
97be called very often.
98
99
100DATA STRUCTURES:
101
102The basic object is a REF (reference) and this may either be a DEF
103(definition) or a USE of a register.
104
105These are linked into a variety of lists; namely reg-def, reg-use,
106  insn-def, insn-use, def-use, and use-def lists.  For example,
107the reg-def lists contain all the refs that define a given register
108while the insn-use lists contain all the refs used by an insn.
109
110Note that the reg-def and reg-use chains are generally short (except for
111the hard registers) and thus it is much faster to search these chains
112rather than searching the def or use bitmaps.
113
114If the insns are in SSA form then the reg-def and use-def lists
115should only contain the single defining ref.
116
117
118TODO:
119
1201) Incremental dataflow analysis.
121
122Note that if a loop invariant insn is hoisted (or sunk), we do not
123need to change the def-use or use-def chains.  All we have to do is to
124change the bb field for all the associated defs and uses and to
125renumber the LUIDs for the original and new basic blocks of the insn.
126
127When shadowing loop mems we create new uses and defs for new pseudos
128so we do not affect the existing dataflow information.
129
130My current strategy is to queue up all modified, created, or deleted
131insns so when df_analyze is called we can easily determine all the new
132or deleted refs.  Currently the global dataflow information is
133recomputed from scratch but this could be propagated more efficiently.
134
1352) Reduced memory requirements.
136
137We could operate a pool of ref structures.  When a ref is deleted it
138gets returned to the pool (say by linking on to a chain of free refs).
139This will require a pair of bitmaps for defs and uses so that we can
140tell which ones have been changed.  Alternatively, we could
141periodically squeeze the def and use tables and associated bitmaps and
142renumber the def and use ids.
143
1443) Ordering of reg-def and reg-use lists.
145
146Should the first entry in the def list be the first def (within a BB)?
147Similarly, should the first entry in the use list be the last use
148(within a BB)?
149
1504) Working with a sub-CFG.
151
152Often the whole CFG does not need to be analyzed, for example,
153when optimizing a loop, only certain registers are of interest.
154Perhaps there should be a bitmap argument to df_analyze to specify
155which registers should be analyzed?
156
157
158NOTES:
159
160Embedded addressing side-effects, such as POST_INC or PRE_INC, generate
161both a use and a def.  These are both marked read/write to show that they
162are dependent. For example, (set (reg 40) (mem (post_inc (reg 42))))
163will generate a use of reg 42 followed by a def of reg 42 (both marked
164read/write).  Similarly, (set (reg 40) (mem (pre_dec (reg 41))))
165generates a use of reg 41 then a def of reg 41 (both marked read/write),
166even though reg 41 is decremented before it is used for the memory
167address in this second example.
168
169A set to a REG inside a ZERO_EXTRACT, or a set to a non-paradoxical SUBREG
170for which the number of word_mode units covered by the outer mode is
171smaller than that covered by the inner mode, invokes a read-modify-write.
172operation.  We generate both a use and a def and again mark them
173read/write.
174Paradoxical subreg writes don't leave a trace of the old content, so they
175are write-only operations.  */
176
177#include "config.h"
178#include "system.h"
179#include "coretypes.h"
180#include "tm.h"
181#include "rtl.h"
182#include "tm_p.h"
183#include "insn-config.h"
184#include "recog.h"
185#include "function.h"
186#include "regs.h"
187#include "alloc-pool.h"
188#include "hard-reg-set.h"
189#include "basic-block.h"
190#include "sbitmap.h"
191#include "bitmap.h"
192#include "df.h"
193
194#define FOR_EACH_BB_IN_BITMAP(BITMAP, MIN, BB, CODE)	\
195  do							\
196    {							\
197      unsigned int node_;				\
198      bitmap_iterator bi;				\
199      EXECUTE_IF_SET_IN_BITMAP (BITMAP, MIN, node_, bi)	\
200	{						\
201	  (BB) = BASIC_BLOCK (node_);			\
202	  CODE;						\
203	}						\
204    }							\
205  while (0)
206
207static alloc_pool df_ref_pool;
208static alloc_pool df_link_pool;
209static struct df *ddf;
210
211static void df_reg_table_realloc (struct df *, int);
212static void df_insn_table_realloc (struct df *, unsigned int);
213static void df_bb_table_realloc (struct df *, unsigned int);
214static void df_bitmaps_alloc (struct df *, bitmap, int);
215static void df_bitmaps_free (struct df *, int);
216static void df_free (struct df *);
217static void df_alloc (struct df *, int);
218
219static rtx df_reg_use_gen (unsigned int);
220
221static inline struct df_link *df_link_create (struct ref *, struct df_link *);
222static struct df_link *df_ref_unlink (struct df_link **, struct ref *);
223static void df_def_unlink (struct df *, struct ref *);
224static void df_use_unlink (struct df *, struct ref *);
225static void df_insn_refs_unlink (struct df *, basic_block, rtx);
226#if 0
227static void df_bb_refs_unlink (struct df *, basic_block);
228static void df_refs_unlink (struct df *, bitmap);
229#endif
230
231static struct ref *df_ref_create (struct df *, rtx, rtx *, rtx,
232				  enum df_ref_type, enum df_ref_flags);
233static void df_ref_record_1 (struct df *, rtx, rtx *, rtx, enum df_ref_type,
234			     enum df_ref_flags);
235static void df_ref_record (struct df *, rtx, rtx *, rtx, enum df_ref_type,
236			   enum df_ref_flags);
237static void df_def_record_1 (struct df *, rtx, basic_block, rtx);
238static void df_defs_record (struct df *, rtx, basic_block, rtx);
239static void df_uses_record (struct df *, rtx *, enum df_ref_type,
240			    basic_block, rtx, enum df_ref_flags);
241static void df_insn_refs_record (struct df *, basic_block, rtx);
242static void df_bb_refs_record (struct df *, basic_block);
243static void df_refs_record (struct df *, bitmap);
244
245static void df_bb_reg_def_chain_create (struct df *, basic_block);
246static void df_reg_def_chain_create (struct df *, bitmap, bool);
247static void df_bb_reg_use_chain_create (struct df *, basic_block);
248static void df_reg_use_chain_create (struct df *, bitmap, bool);
249static void df_bb_du_chain_create (struct df *, basic_block, bitmap);
250static void df_du_chain_create (struct df *, bitmap);
251static void df_bb_ud_chain_create (struct df *, basic_block);
252static void df_ud_chain_create (struct df *, bitmap);
253static void df_bb_rd_local_compute (struct df *, basic_block, bitmap);
254static void df_rd_local_compute (struct df *, bitmap);
255static void df_bb_ru_local_compute (struct df *, basic_block);
256static void df_ru_local_compute (struct df *, bitmap);
257static void df_bb_lr_local_compute (struct df *, basic_block);
258static void df_lr_local_compute (struct df *, bitmap);
259static void df_bb_reg_info_compute (struct df *, basic_block, bitmap);
260static void df_reg_info_compute (struct df *, bitmap);
261
262static int df_bb_luids_set (struct df *df, basic_block);
263static int df_luids_set (struct df *df, bitmap);
264
265static int df_modified_p (struct df *, bitmap);
266static int df_refs_queue (struct df *);
267static int df_refs_process (struct df *);
268static int df_bb_refs_update (struct df *, basic_block);
269static int df_refs_update (struct df *, bitmap);
270static void df_analyze_1 (struct df *, bitmap, int, int);
271
272static void df_insns_modify (struct df *, basic_block, rtx, rtx);
273static int df_rtx_mem_replace (rtx *, void *);
274static int df_rtx_reg_replace (rtx *, void *);
275void df_refs_reg_replace (struct df *, bitmap, struct df_link *, rtx, rtx);
276
277static int df_def_dominates_all_uses_p (struct df *, struct ref *def);
278static int df_def_dominates_uses_p (struct df *, struct ref *def, bitmap);
279static struct ref *df_bb_insn_regno_last_use_find (struct df *, basic_block,
280						   rtx, unsigned int);
281static struct ref *df_bb_insn_regno_first_def_find (struct df *, basic_block,
282						    rtx, unsigned int);
283
284static void df_chain_dump (struct df_link *, FILE *file);
285static void df_chain_dump_regno (struct df_link *, FILE *file);
286static void df_regno_debug (struct df *, unsigned int, FILE *);
287static void df_ref_debug (struct df *, struct ref *, FILE *);
288static void df_rd_transfer_function (int, int *, void *, void *, void *,
289				     void *, void *);
290static void df_ru_transfer_function (int, int *, void *, void *, void *,
291				     void *, void *);
292static void df_lr_transfer_function (int, int *, void *, void *, void *,
293				     void *, void *);
294static void hybrid_search (basic_block, struct dataflow *,
295			   sbitmap, sbitmap, sbitmap);
296
297
298/* Local memory allocation/deallocation routines.  */
299
300
301/* Increase the insn info table to have space for at least SIZE + 1
302   elements.  */
303static void
304df_insn_table_realloc (struct df *df, unsigned int size)
305{
306  size++;
307  if (size <= df->insn_size)
308    return;
309
310  /* Make the table a little larger than requested, so we do not need
311     to enlarge it so often.  */
312  size += df->insn_size / 4;
313
314  df->insns = xrealloc (df->insns, size * sizeof (struct insn_info));
315
316  memset (df->insns + df->insn_size, 0,
317	  (size - df->insn_size) * sizeof (struct insn_info));
318
319  df->insn_size = size;
320
321  if (! df->insns_modified)
322    {
323      df->insns_modified = BITMAP_ALLOC (NULL);
324      bitmap_zero (df->insns_modified);
325    }
326}
327
328/* Increase the bb info table to have space for at least SIZE + 1
329   elements.  */
330
331static void
332df_bb_table_realloc (struct df *df, unsigned int size)
333{
334  size++;
335  if (size <= df->n_bbs)
336    return;
337
338  /* Make the table a little larger than requested, so we do not need
339     to enlarge it so often.  */
340  size += df->n_bbs / 4;
341
342  df->bbs = xrealloc (df->bbs, size * sizeof (struct bb_info));
343
344  memset (df->bbs + df->n_bbs, 0, (size - df->n_bbs) * sizeof (struct bb_info));
345
346  df->n_bbs = size;
347}
348
349/* Increase the reg info table by SIZE more elements.  */
350static void
351df_reg_table_realloc (struct df *df, int size)
352{
353  /* Make table 25 percent larger by default.  */
354  if (! size)
355    size = df->reg_size / 4;
356
357  size += df->reg_size;
358  if (size < max_reg_num ())
359    size = max_reg_num ();
360
361  df->regs = xrealloc (df->regs, size * sizeof (struct reg_info));
362  df->reg_def_last = xrealloc (df->reg_def_last,
363			       size * sizeof (struct ref *));
364
365  /* Zero the new entries.  */
366  memset (df->regs + df->reg_size, 0,
367	  (size - df->reg_size) * sizeof (struct reg_info));
368
369  df->reg_size = size;
370}
371
372
373/* Allocate bitmaps for each basic block.  */
374
375static void
376df_bitmaps_alloc (struct df *df, bitmap blocks, int flags)
377{
378  basic_block bb;
379
380  df->n_defs = df->def_id;
381  df->n_uses = df->use_id;
382
383  if (!blocks)
384    blocks = df->all_blocks;
385
386  FOR_EACH_BB_IN_BITMAP (blocks, 0, bb,
387    {
388      struct bb_info *bb_info = DF_BB_INFO (df, bb);
389
390      if (flags & DF_RD)
391	{
392	  if (!bb_info->rd_in)
393	    {
394	      /* Allocate bitmaps for reaching definitions.  */
395	      bb_info->rd_kill = BITMAP_ALLOC (NULL);
396	      bb_info->rd_gen = BITMAP_ALLOC (NULL);
397	      bb_info->rd_in = BITMAP_ALLOC (NULL);
398	      bb_info->rd_out = BITMAP_ALLOC (NULL);
399	    }
400	  else
401	    {
402	      bitmap_clear (bb_info->rd_kill);
403	      bitmap_clear (bb_info->rd_gen);
404	      bitmap_clear (bb_info->rd_in);
405	      bitmap_clear (bb_info->rd_out);
406	    }
407	}
408
409      if (flags & DF_RU)
410	{
411	  if (!bb_info->ru_in)
412	    {
413	      /* Allocate bitmaps for upward exposed uses.  */
414	      bb_info->ru_kill = BITMAP_ALLOC (NULL);
415	      bb_info->ru_gen = BITMAP_ALLOC (NULL);
416	      bb_info->ru_in = BITMAP_ALLOC (NULL);
417	      bb_info->ru_out = BITMAP_ALLOC (NULL);
418	    }
419	  else
420	    {
421	      bitmap_clear (bb_info->ru_kill);
422	      bitmap_clear (bb_info->ru_gen);
423	      bitmap_clear (bb_info->ru_in);
424	      bitmap_clear (bb_info->ru_out);
425	    }
426	}
427
428      if (flags & DF_LR)
429	{
430	  if (!bb_info->lr_in)
431	    {
432	      /* Allocate bitmaps for live variables.  */
433	      bb_info->lr_def = BITMAP_ALLOC (NULL);
434	      bb_info->lr_use = BITMAP_ALLOC (NULL);
435	      bb_info->lr_in = BITMAP_ALLOC (NULL);
436	      bb_info->lr_out = BITMAP_ALLOC (NULL);
437	    }
438	  else
439	    {
440	      bitmap_clear (bb_info->lr_def);
441	      bitmap_clear (bb_info->lr_use);
442	      bitmap_clear (bb_info->lr_in);
443	      bitmap_clear (bb_info->lr_out);
444	    }
445	}
446    });
447}
448
449
450/* Free bitmaps for each basic block.  */
451static void
452df_bitmaps_free (struct df *df, int flags)
453{
454  basic_block bb;
455
456  FOR_EACH_BB (bb)
457    {
458      struct bb_info *bb_info = DF_BB_INFO (df, bb);
459
460      if (!bb_info)
461	continue;
462
463      if ((flags & DF_RD) && bb_info->rd_in)
464	{
465	  /* Free bitmaps for reaching definitions.  */
466	  BITMAP_FREE (bb_info->rd_kill);
467	  bb_info->rd_kill = NULL;
468	  BITMAP_FREE (bb_info->rd_gen);
469	  bb_info->rd_gen = NULL;
470	  BITMAP_FREE (bb_info->rd_in);
471	  bb_info->rd_in = NULL;
472	  BITMAP_FREE (bb_info->rd_out);
473	  bb_info->rd_out = NULL;
474	}
475
476      if ((flags & DF_RU) && bb_info->ru_in)
477	{
478	  /* Free bitmaps for upward exposed uses.  */
479	  BITMAP_FREE (bb_info->ru_kill);
480	  bb_info->ru_kill = NULL;
481	  BITMAP_FREE (bb_info->ru_gen);
482	  bb_info->ru_gen = NULL;
483	  BITMAP_FREE (bb_info->ru_in);
484	  bb_info->ru_in = NULL;
485	  BITMAP_FREE (bb_info->ru_out);
486	  bb_info->ru_out = NULL;
487	}
488
489      if ((flags & DF_LR) && bb_info->lr_in)
490	{
491	  /* Free bitmaps for live variables.  */
492	  BITMAP_FREE (bb_info->lr_def);
493	  bb_info->lr_def = NULL;
494	  BITMAP_FREE (bb_info->lr_use);
495	  bb_info->lr_use = NULL;
496	  BITMAP_FREE (bb_info->lr_in);
497	  bb_info->lr_in = NULL;
498	  BITMAP_FREE (bb_info->lr_out);
499	  bb_info->lr_out = NULL;
500	}
501    }
502  df->flags &= ~(flags & (DF_RD | DF_RU | DF_LR));
503}
504
505
506/* Allocate and initialize dataflow memory.  */
507static void
508df_alloc (struct df *df, int n_regs)
509{
510  int n_insns;
511  basic_block bb;
512
513  df_link_pool = create_alloc_pool ("df_link pool", sizeof (struct df_link),
514				    100);
515  df_ref_pool  = create_alloc_pool ("df_ref pool", sizeof (struct ref), 100);
516
517  /* Perhaps we should use LUIDs to save memory for the insn_refs
518     table.  This is only a small saving; a few pointers.  */
519  n_insns = get_max_uid () + 1;
520
521  df->def_id = 0;
522  df->n_defs = 0;
523  /* Approximate number of defs by number of insns.  */
524  df->def_size = n_insns;
525  df->defs = xmalloc (df->def_size * sizeof (*df->defs));
526
527  df->use_id = 0;
528  df->n_uses = 0;
529  /* Approximate number of uses by twice number of insns.  */
530  df->use_size = n_insns * 2;
531  df->uses = xmalloc (df->use_size * sizeof (*df->uses));
532
533  df->n_regs = n_regs;
534  df->n_bbs = last_basic_block;
535
536  /* Allocate temporary working array used during local dataflow analysis.  */
537  df_insn_table_realloc (df, n_insns);
538
539  df_reg_table_realloc (df, df->n_regs);
540
541  df->bbs_modified = BITMAP_ALLOC (NULL);
542  bitmap_zero (df->bbs_modified);
543
544  df->flags = 0;
545
546  df->bbs = xcalloc (last_basic_block, sizeof (struct bb_info));
547
548  df->all_blocks = BITMAP_ALLOC (NULL);
549  FOR_EACH_BB (bb)
550    bitmap_set_bit (df->all_blocks, bb->index);
551}
552
553
554/* Free all the dataflow info.  */
555static void
556df_free (struct df *df)
557{
558  df_bitmaps_free (df, DF_ALL);
559
560  if (df->bbs)
561    free (df->bbs);
562  df->bbs = 0;
563
564  if (df->insns)
565    free (df->insns);
566  df->insns = 0;
567  df->insn_size = 0;
568
569  if (df->defs)
570    free (df->defs);
571  df->defs = 0;
572  df->def_size = 0;
573  df->def_id = 0;
574
575  if (df->uses)
576    free (df->uses);
577  df->uses = 0;
578  df->use_size = 0;
579  df->use_id = 0;
580
581  if (df->regs)
582    free (df->regs);
583  df->regs = 0;
584  df->reg_size = 0;
585
586  BITMAP_FREE (df->bbs_modified);
587  df->bbs_modified = 0;
588
589  BITMAP_FREE (df->insns_modified);
590  df->insns_modified = 0;
591
592  BITMAP_FREE (df->all_blocks);
593  df->all_blocks = 0;
594
595  free_alloc_pool (df_ref_pool);
596  free_alloc_pool (df_link_pool);
597}
598
599/* Local miscellaneous routines.  */
600
601/* Return a USE for register REGNO.  */
602static rtx df_reg_use_gen (unsigned int regno)
603{
604  rtx reg;
605  rtx use;
606
607  reg = regno_reg_rtx[regno];
608
609  use = gen_rtx_USE (GET_MODE (reg), reg);
610  return use;
611}
612
613/* Local chain manipulation routines.  */
614
615/* Create a link in a def-use or use-def chain.  */
616static inline struct df_link *
617df_link_create (struct ref *ref, struct df_link *next)
618{
619  struct df_link *link;
620
621  link = pool_alloc (df_link_pool);
622  link->next = next;
623  link->ref = ref;
624  return link;
625}
626
627/* Releases members of the CHAIN.  */
628
629static void
630free_reg_ref_chain (struct df_link **chain)
631{
632  struct df_link *act, *next;
633
634  for (act = *chain; act; act = next)
635    {
636      next = act->next;
637      pool_free (df_link_pool, act);
638    }
639
640  *chain = NULL;
641}
642
643/* Add REF to chain head pointed to by PHEAD.  */
644static struct df_link *
645df_ref_unlink (struct df_link **phead, struct ref *ref)
646{
647  struct df_link *link = *phead;
648
649  if (link)
650    {
651      if (! link->next)
652	{
653	  /* Only a single ref.  It must be the one we want.
654	     If not, the def-use and use-def chains are likely to
655	     be inconsistent.  */
656	  gcc_assert (link->ref == ref);
657
658	  /* Now have an empty chain.  */
659	  *phead = NULL;
660	}
661      else
662	{
663	  /* Multiple refs.  One of them must be us.  */
664	  if (link->ref == ref)
665	    *phead = link->next;
666	  else
667	    {
668	      /* Follow chain.  */
669	      for (; link->next; link = link->next)
670		{
671		  if (link->next->ref == ref)
672		    {
673		      /* Unlink from list.  */
674		      link->next = link->next->next;
675		      return link->next;
676		    }
677		}
678	    }
679	}
680    }
681  return link;
682}
683
684
685/* Unlink REF from all def-use/use-def chains, etc.  */
686int
687df_ref_remove (struct df *df, struct ref *ref)
688{
689  if (DF_REF_REG_DEF_P (ref))
690    {
691      df_def_unlink (df, ref);
692      df_ref_unlink (&df->insns[DF_REF_INSN_UID (ref)].defs, ref);
693    }
694  else
695    {
696      df_use_unlink (df, ref);
697      df_ref_unlink (&df->insns[DF_REF_INSN_UID (ref)].uses, ref);
698    }
699  return 1;
700}
701
702
703/* Unlink DEF from use-def and reg-def chains.  */
704static void
705df_def_unlink (struct df *df ATTRIBUTE_UNUSED, struct ref *def)
706{
707  struct df_link *du_link;
708  unsigned int dregno = DF_REF_REGNO (def);
709
710  /* Follow def-use chain to find all the uses of this def.  */
711  for (du_link = DF_REF_CHAIN (def); du_link; du_link = du_link->next)
712    {
713      struct ref *use = du_link->ref;
714
715      /* Unlink this def from the use-def chain.  */
716      df_ref_unlink (&DF_REF_CHAIN (use), def);
717    }
718  DF_REF_CHAIN (def) = 0;
719
720  /* Unlink def from reg-def chain.  */
721  df_ref_unlink (&df->regs[dregno].defs, def);
722
723  df->defs[DF_REF_ID (def)] = 0;
724}
725
726
727/* Unlink use from def-use and reg-use chains.  */
728static void
729df_use_unlink (struct df *df ATTRIBUTE_UNUSED, struct ref *use)
730{
731  struct df_link *ud_link;
732  unsigned int uregno = DF_REF_REGNO (use);
733
734  /* Follow use-def chain to find all the defs of this use.  */
735  for (ud_link = DF_REF_CHAIN (use); ud_link; ud_link = ud_link->next)
736    {
737      struct ref *def = ud_link->ref;
738
739      /* Unlink this use from the def-use chain.  */
740      df_ref_unlink (&DF_REF_CHAIN (def), use);
741    }
742  DF_REF_CHAIN (use) = 0;
743
744  /* Unlink use from reg-use chain.  */
745  df_ref_unlink (&df->regs[uregno].uses, use);
746
747  df->uses[DF_REF_ID (use)] = 0;
748}
749
750/* Local routines for recording refs.  */
751
752
753/* Create a new ref of type DF_REF_TYPE for register REG at address
754   LOC within INSN of BB.  */
755static struct ref *
756df_ref_create (struct df *df, rtx reg, rtx *loc, rtx insn,
757	       enum df_ref_type ref_type, enum df_ref_flags ref_flags)
758{
759  struct ref *this_ref;
760
761  this_ref = pool_alloc (df_ref_pool);
762  DF_REF_REG (this_ref) = reg;
763  DF_REF_LOC (this_ref) = loc;
764  DF_REF_INSN (this_ref) = insn;
765  DF_REF_CHAIN (this_ref) = 0;
766  DF_REF_TYPE (this_ref) = ref_type;
767  DF_REF_FLAGS (this_ref) = ref_flags;
768  DF_REF_DATA (this_ref) = NULL;
769
770  if (ref_type == DF_REF_REG_DEF)
771    {
772      if (df->def_id >= df->def_size)
773	{
774	  /* Make table 25 percent larger.  */
775	  df->def_size += (df->def_size / 4);
776	  df->defs = xrealloc (df->defs,
777			       df->def_size * sizeof (*df->defs));
778	}
779      DF_REF_ID (this_ref) = df->def_id;
780      df->defs[df->def_id++] = this_ref;
781    }
782  else
783    {
784      if (df->use_id >= df->use_size)
785	{
786	  /* Make table 25 percent larger.  */
787	  df->use_size += (df->use_size / 4);
788	  df->uses = xrealloc (df->uses,
789			       df->use_size * sizeof (*df->uses));
790	}
791      DF_REF_ID (this_ref) = df->use_id;
792      df->uses[df->use_id++] = this_ref;
793    }
794  return this_ref;
795}
796
797
798/* Create a new reference of type DF_REF_TYPE for a single register REG,
799   used inside the LOC rtx of INSN.  */
800static void
801df_ref_record_1 (struct df *df, rtx reg, rtx *loc, rtx insn,
802		 enum df_ref_type ref_type, enum df_ref_flags ref_flags)
803{
804  df_ref_create (df, reg, loc, insn, ref_type, ref_flags);
805}
806
807
808/* Create new references of type DF_REF_TYPE for each part of register REG
809   at address LOC within INSN of BB.  */
810static void
811df_ref_record (struct df *df, rtx reg, rtx *loc, rtx insn,
812	       enum df_ref_type ref_type, enum df_ref_flags ref_flags)
813{
814  unsigned int regno;
815
816  gcc_assert (REG_P (reg) || GET_CODE (reg) == SUBREG);
817
818  /* For the reg allocator we are interested in some SUBREG rtx's, but not
819     all.  Notably only those representing a word extraction from a multi-word
820     reg.  As written in the docu those should have the form
821     (subreg:SI (reg:M A) N), with size(SImode) > size(Mmode).
822     XXX Is that true?  We could also use the global word_mode variable.  */
823  if ((df->flags & DF_SUBREGS) == 0
824      && GET_CODE (reg) == SUBREG
825      && (GET_MODE_SIZE (GET_MODE (reg)) < GET_MODE_SIZE (word_mode)
826	  || GET_MODE_SIZE (GET_MODE (reg))
827	       >= GET_MODE_SIZE (GET_MODE (SUBREG_REG (reg)))))
828    {
829      loc = &SUBREG_REG (reg);
830      reg = *loc;
831      ref_flags |= DF_REF_STRIPPED;
832    }
833
834  regno = REGNO (GET_CODE (reg) == SUBREG ? SUBREG_REG (reg) : reg);
835  if (regno < FIRST_PSEUDO_REGISTER)
836    {
837      int i;
838      int endregno;
839
840      if (! (df->flags & DF_HARD_REGS))
841	return;
842
843      /* GET_MODE (reg) is correct here.  We do not want to go into a SUBREG
844         for the mode, because we only want to add references to regs, which
845	 are really referenced.  E.g., a (subreg:SI (reg:DI 0) 0) does _not_
846	 reference the whole reg 0 in DI mode (which would also include
847	 reg 1, at least, if 0 and 1 are SImode registers).  */
848      endregno = hard_regno_nregs[regno][GET_MODE (reg)];
849      if (GET_CODE (reg) == SUBREG)
850        regno += subreg_regno_offset (regno, GET_MODE (SUBREG_REG (reg)),
851				      SUBREG_BYTE (reg), GET_MODE (reg));
852      endregno += regno;
853
854      for (i = regno; i < endregno; i++)
855	df_ref_record_1 (df, regno_reg_rtx[i],
856			 loc, insn, ref_type, ref_flags);
857    }
858  else
859    {
860      df_ref_record_1 (df, reg, loc, insn, ref_type, ref_flags);
861    }
862}
863
864
865/* A set to a non-paradoxical SUBREG for which the number of word_mode units
866   covered by the outer mode is smaller than that covered by the inner mode,
867   is a read-modify-write operation.
868   This function returns true iff the SUBREG X is such a SUBREG.  */
869bool
870read_modify_subreg_p (rtx x)
871{
872  unsigned int isize, osize;
873  if (GET_CODE (x) != SUBREG)
874    return false;
875  isize = GET_MODE_SIZE (GET_MODE (SUBREG_REG (x)));
876  osize = GET_MODE_SIZE (GET_MODE (x));
877  return (isize > osize && isize > UNITS_PER_WORD);
878}
879
880
881/* Process all the registers defined in the rtx, X.  */
882static void
883df_def_record_1 (struct df *df, rtx x, basic_block bb, rtx insn)
884{
885  rtx *loc;
886  rtx dst;
887  enum df_ref_flags flags = 0;
888
889 /* We may recursively call ourselves on EXPR_LIST when dealing with PARALLEL
890     construct.  */
891  if (GET_CODE (x) == EXPR_LIST || GET_CODE (x) == CLOBBER)
892    loc = &XEXP (x, 0);
893  else
894    loc = &SET_DEST (x);
895  dst = *loc;
896
897  /* Some targets place small structures in registers for
898     return values of functions.  */
899  if (GET_CODE (dst) == PARALLEL && GET_MODE (dst) == BLKmode)
900    {
901      int i;
902
903      for (i = XVECLEN (dst, 0) - 1; i >= 0; i--)
904	{
905	  rtx temp = XVECEXP (dst, 0, i);
906	  if (GET_CODE (temp) == EXPR_LIST || GET_CODE (temp) == CLOBBER
907	      || GET_CODE (temp) == SET)
908	    df_def_record_1 (df, temp, bb, insn);
909	}
910      return;
911    }
912
913  /* Maybe, we should flag the use of STRICT_LOW_PART somehow.  It might
914     be handy for the reg allocator.  */
915  while (GET_CODE (dst) == STRICT_LOW_PART
916	 || GET_CODE (dst) == ZERO_EXTRACT
917	 || read_modify_subreg_p (dst))
918    {
919      /* Strict low part always contains SUBREG, but we do not want to make
920	 it appear outside, as whole register is always considered.  */
921      if (GET_CODE (dst) == STRICT_LOW_PART)
922	{
923	  loc = &XEXP (dst, 0);
924	  dst = *loc;
925	}
926      loc = &XEXP (dst, 0);
927      dst = *loc;
928      flags |= DF_REF_READ_WRITE;
929    }
930
931  if (REG_P (dst)
932      || (GET_CODE (dst) == SUBREG && REG_P (SUBREG_REG (dst))))
933    df_ref_record (df, dst, loc, insn, DF_REF_REG_DEF, flags);
934}
935
936
937/* Process all the registers defined in the pattern rtx, X.  */
938static void
939df_defs_record (struct df *df, rtx x, basic_block bb, rtx insn)
940{
941  RTX_CODE code = GET_CODE (x);
942
943  if (code == SET || code == CLOBBER)
944    {
945      /* Mark the single def within the pattern.  */
946      df_def_record_1 (df, x, bb, insn);
947    }
948  else if (code == PARALLEL)
949    {
950      int i;
951
952      /* Mark the multiple defs within the pattern.  */
953      for (i = XVECLEN (x, 0) - 1; i >= 0; i--)
954	{
955	  code = GET_CODE (XVECEXP (x, 0, i));
956	  if (code == SET || code == CLOBBER)
957	    df_def_record_1 (df, XVECEXP (x, 0, i), bb, insn);
958	}
959    }
960}
961
962
963/* Process all the registers used in the rtx at address LOC.  */
964static void
965df_uses_record (struct df *df, rtx *loc, enum df_ref_type ref_type,
966		basic_block bb, rtx insn, enum df_ref_flags flags)
967{
968  RTX_CODE code;
969  rtx x;
970 retry:
971  x = *loc;
972  if (!x)
973    return;
974  code = GET_CODE (x);
975  switch (code)
976    {
977    case LABEL_REF:
978    case SYMBOL_REF:
979    case CONST_INT:
980    case CONST:
981    case CONST_DOUBLE:
982    case CONST_VECTOR:
983    case PC:
984    case CC0:
985    case ADDR_VEC:
986    case ADDR_DIFF_VEC:
987      return;
988
989    case CLOBBER:
990      /* If we are clobbering a MEM, mark any registers inside the address
991	 as being used.  */
992      if (MEM_P (XEXP (x, 0)))
993	df_uses_record (df, &XEXP (XEXP (x, 0), 0),
994			DF_REF_REG_MEM_STORE, bb, insn, flags);
995
996      /* If we're clobbering a REG then we have a def so ignore.  */
997      return;
998
999    case MEM:
1000      df_uses_record (df, &XEXP (x, 0), DF_REF_REG_MEM_LOAD, bb, insn, 0);
1001      return;
1002
1003    case SUBREG:
1004      /* While we're here, optimize this case.  */
1005
1006      /* In case the SUBREG is not of a REG, do not optimize.  */
1007      if (!REG_P (SUBREG_REG (x)))
1008	{
1009	  loc = &SUBREG_REG (x);
1010	  df_uses_record (df, loc, ref_type, bb, insn, flags);
1011	  return;
1012	}
1013      /* ... Fall through ...  */
1014
1015    case REG:
1016      df_ref_record (df, x, loc, insn, ref_type, flags);
1017      return;
1018
1019    case SET:
1020      {
1021	rtx dst = SET_DEST (x);
1022
1023	df_uses_record (df, &SET_SRC (x), DF_REF_REG_USE, bb, insn, 0);
1024
1025	switch (GET_CODE (dst))
1026	  {
1027	    case SUBREG:
1028	      if (read_modify_subreg_p (dst))
1029		{
1030		  df_uses_record (df, &SUBREG_REG (dst), DF_REF_REG_USE, bb,
1031				  insn, DF_REF_READ_WRITE);
1032		  break;
1033		}
1034	      /* Fall through.  */
1035	    case REG:
1036	    case PARALLEL:
1037	    case SCRATCH:
1038	    case PC:
1039	    case CC0:
1040		break;
1041	    case MEM:
1042	      df_uses_record (df, &XEXP (dst, 0),
1043			      DF_REF_REG_MEM_STORE,
1044			      bb, insn, 0);
1045	      break;
1046	    case STRICT_LOW_PART:
1047	      /* A strict_low_part uses the whole REG and not just the
1048		 SUBREG.  */
1049	      dst = XEXP (dst, 0);
1050	      gcc_assert (GET_CODE (dst) == SUBREG);
1051	      df_uses_record (df, &SUBREG_REG (dst), DF_REF_REG_USE, bb,
1052			     insn, DF_REF_READ_WRITE);
1053	      break;
1054	    case ZERO_EXTRACT:
1055	    case SIGN_EXTRACT:
1056	      df_uses_record (df, &XEXP (dst, 0), DF_REF_REG_USE, bb, insn,
1057			      DF_REF_READ_WRITE);
1058	      df_uses_record (df, &XEXP (dst, 1), DF_REF_REG_USE, bb, insn, 0);
1059	      df_uses_record (df, &XEXP (dst, 2), DF_REF_REG_USE, bb, insn, 0);
1060	      dst = XEXP (dst, 0);
1061	      break;
1062	    default:
1063	      gcc_unreachable ();
1064	  }
1065	return;
1066      }
1067
1068    case RETURN:
1069      break;
1070
1071    case ASM_OPERANDS:
1072    case UNSPEC_VOLATILE:
1073    case TRAP_IF:
1074    case ASM_INPUT:
1075      {
1076	/* Traditional and volatile asm instructions must be considered to use
1077	   and clobber all hard registers, all pseudo-registers and all of
1078	   memory.  So must TRAP_IF and UNSPEC_VOLATILE operations.
1079
1080	   Consider for instance a volatile asm that changes the fpu rounding
1081	   mode.  An insn should not be moved across this even if it only uses
1082	   pseudo-regs because it might give an incorrectly rounded result.
1083
1084	   For now, just mark any regs we can find in ASM_OPERANDS as
1085	   used.  */
1086
1087	/* For all ASM_OPERANDS, we must traverse the vector of input operands.
1088	   We can not just fall through here since then we would be confused
1089	   by the ASM_INPUT rtx inside ASM_OPERANDS, which do not indicate
1090	   traditional asms unlike their normal usage.  */
1091	if (code == ASM_OPERANDS)
1092	  {
1093	    int j;
1094
1095	    for (j = 0; j < ASM_OPERANDS_INPUT_LENGTH (x); j++)
1096	      df_uses_record (df, &ASM_OPERANDS_INPUT (x, j),
1097			      DF_REF_REG_USE, bb, insn, 0);
1098	    return;
1099	  }
1100	break;
1101      }
1102
1103    case PRE_DEC:
1104    case POST_DEC:
1105    case PRE_INC:
1106    case POST_INC:
1107    case PRE_MODIFY:
1108    case POST_MODIFY:
1109      /* Catch the def of the register being modified.  */
1110      df_ref_record (df, XEXP (x, 0), &XEXP (x, 0), insn, DF_REF_REG_DEF, DF_REF_READ_WRITE);
1111
1112      /* ... Fall through to handle uses ...  */
1113
1114    default:
1115      break;
1116    }
1117
1118  /* Recursively scan the operands of this expression.  */
1119  {
1120    const char *fmt = GET_RTX_FORMAT (code);
1121    int i;
1122
1123    for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
1124      {
1125	if (fmt[i] == 'e')
1126	  {
1127	    /* Tail recursive case: save a function call level.  */
1128	    if (i == 0)
1129	      {
1130		loc = &XEXP (x, 0);
1131		goto retry;
1132	      }
1133	    df_uses_record (df, &XEXP (x, i), ref_type, bb, insn, flags);
1134	  }
1135	else if (fmt[i] == 'E')
1136	  {
1137	    int j;
1138	    for (j = 0; j < XVECLEN (x, i); j++)
1139	      df_uses_record (df, &XVECEXP (x, i, j), ref_type,
1140			      bb, insn, flags);
1141	  }
1142      }
1143  }
1144}
1145
1146
1147/* Record all the df within INSN of basic block BB.  */
1148static void
1149df_insn_refs_record (struct df *df, basic_block bb, rtx insn)
1150{
1151  int i;
1152
1153  if (INSN_P (insn))
1154    {
1155      rtx note;
1156
1157      /* Record register defs.  */
1158      df_defs_record (df, PATTERN (insn), bb, insn);
1159
1160      if (df->flags & DF_EQUIV_NOTES)
1161	for (note = REG_NOTES (insn); note;
1162	     note = XEXP (note, 1))
1163	  {
1164	    switch (REG_NOTE_KIND (note))
1165	      {
1166	      case REG_EQUIV:
1167	      case REG_EQUAL:
1168		df_uses_record (df, &XEXP (note, 0), DF_REF_REG_USE,
1169				bb, insn, 0);
1170	      default:
1171		break;
1172	      }
1173	  }
1174
1175      if (CALL_P (insn))
1176	{
1177	  rtx note;
1178	  rtx x;
1179
1180	  /* Record the registers used to pass arguments.  */
1181	  for (note = CALL_INSN_FUNCTION_USAGE (insn); note;
1182	       note = XEXP (note, 1))
1183	    {
1184	      if (GET_CODE (XEXP (note, 0)) == USE)
1185		df_uses_record (df, &XEXP (XEXP (note, 0), 0), DF_REF_REG_USE,
1186				bb, insn, 0);
1187	    }
1188
1189	  /* The stack ptr is used (honorarily) by a CALL insn.  */
1190	  x = df_reg_use_gen (STACK_POINTER_REGNUM);
1191	  df_uses_record (df, &XEXP (x, 0), DF_REF_REG_USE, bb, insn, 0);
1192
1193	  if (df->flags & DF_HARD_REGS)
1194	    {
1195	      /* Calls may also reference any of the global registers,
1196		 so they are recorded as used.  */
1197	      for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
1198		if (global_regs[i])
1199		  {
1200		    x = df_reg_use_gen (i);
1201		    df_uses_record (df, &XEXP (x, 0),
1202				    DF_REF_REG_USE, bb, insn, 0);
1203		  }
1204	    }
1205	}
1206
1207      /* Record the register uses.  */
1208      df_uses_record (df, &PATTERN (insn),
1209		      DF_REF_REG_USE, bb, insn, 0);
1210
1211      if (CALL_P (insn))
1212	{
1213	  rtx note;
1214
1215	  /* We do not record hard registers clobbered by the call,
1216	     since there are awfully many of them and "defs" created
1217	     through them are not interesting (since no use can be legally
1218	     reached by them).  So we must just make sure we include them when
1219	     computing kill bitmaps.  */
1220
1221	  /* There may be extra registers to be clobbered.  */
1222	  for (note = CALL_INSN_FUNCTION_USAGE (insn);
1223	       note;
1224	       note = XEXP (note, 1))
1225	    if (GET_CODE (XEXP (note, 0)) == CLOBBER)
1226	      df_defs_record (df, XEXP (note, 0), bb, insn);
1227	}
1228    }
1229}
1230
1231
1232/* Record all the refs within the basic block BB.  */
1233static void
1234df_bb_refs_record (struct df *df, basic_block bb)
1235{
1236  rtx insn;
1237
1238  /* Scan the block an insn at a time from beginning to end.  */
1239  FOR_BB_INSNS (bb, insn)
1240    {
1241      if (INSN_P (insn))
1242	{
1243	  /* Record defs within INSN.  */
1244	  df_insn_refs_record (df, bb, insn);
1245	}
1246    }
1247}
1248
1249
1250/* Record all the refs in the basic blocks specified by BLOCKS.  */
1251static void
1252df_refs_record (struct df *df, bitmap blocks)
1253{
1254  basic_block bb;
1255
1256  FOR_EACH_BB_IN_BITMAP (blocks, 0, bb,
1257    {
1258      df_bb_refs_record (df, bb);
1259    });
1260}
1261
1262/* Dataflow analysis routines.  */
1263
1264/* Create reg-def chains for basic block BB.  These are a list of
1265   definitions for each register.  */
1266
1267static void
1268df_bb_reg_def_chain_create (struct df *df, basic_block bb)
1269{
1270  rtx insn;
1271
1272  /* Perhaps the defs should be sorted using a depth first search
1273     of the CFG (or possibly a breadth first search).  */
1274
1275  FOR_BB_INSNS_REVERSE (bb, insn)
1276    {
1277      struct df_link *link;
1278      unsigned int uid = INSN_UID (insn);
1279
1280      if (! INSN_P (insn))
1281	continue;
1282
1283      for (link = df->insns[uid].defs; link; link = link->next)
1284	{
1285	  struct ref *def = link->ref;
1286	  unsigned int dregno = DF_REF_REGNO (def);
1287
1288          /* Do not add ref's to the chain twice, i.e., only add new
1289             refs.  XXX the same could be done by testing if the
1290             current insn is a modified (or a new) one.  This would be
1291             faster.  */
1292          if (DF_REF_ID (def) < df->def_id_save)
1293            continue;
1294
1295	  df->regs[dregno].defs = df_link_create (def, df->regs[dregno].defs);
1296	}
1297    }
1298}
1299
1300
1301/* Create reg-def chains for each basic block within BLOCKS.  These
1302   are a list of definitions for each register.  If REDO is true, add
1303   all defs, otherwise just add the new defs.  */
1304
1305static void
1306df_reg_def_chain_create (struct df *df, bitmap blocks, bool redo)
1307{
1308  basic_block bb;
1309#ifdef ENABLE_CHECKING
1310  unsigned regno;
1311#endif
1312  unsigned old_def_id_save = df->def_id_save;
1313
1314  if (redo)
1315    {
1316#ifdef ENABLE_CHECKING
1317      for (regno = 0; regno < df->n_regs; regno++)
1318	gcc_assert (!df->regs[regno].defs);
1319#endif
1320
1321      /* Pretend that all defs are new.  */
1322      df->def_id_save = 0;
1323    }
1324
1325  FOR_EACH_BB_IN_BITMAP (blocks, 0, bb,
1326    {
1327      df_bb_reg_def_chain_create (df, bb);
1328    });
1329
1330  df->def_id_save = old_def_id_save;
1331}
1332
1333/* Remove all reg-def chains stored in the dataflow object DF.  */
1334
1335static void
1336df_reg_def_chain_clean (struct df *df)
1337{
1338  unsigned regno;
1339
1340  for (regno = 0; regno < df->n_regs; regno++)
1341    free_reg_ref_chain (&df->regs[regno].defs);
1342}
1343
1344/* Create reg-use chains for basic block BB.  These are a list of uses
1345   for each register.  */
1346
1347static void
1348df_bb_reg_use_chain_create (struct df *df, basic_block bb)
1349{
1350  rtx insn;
1351
1352  /* Scan in forward order so that the last uses appear at the start
1353     of the chain.  */
1354
1355  FOR_BB_INSNS (bb, insn)
1356    {
1357      struct df_link *link;
1358      unsigned int uid = INSN_UID (insn);
1359
1360      if (! INSN_P (insn))
1361	continue;
1362
1363      for (link = df->insns[uid].uses; link; link = link->next)
1364	{
1365	  struct ref *use = link->ref;
1366	  unsigned int uregno = DF_REF_REGNO (use);
1367
1368          /* Do not add ref's to the chain twice, i.e., only add new
1369             refs.  XXX the same could be done by testing if the
1370             current insn is a modified (or a new) one.  This would be
1371             faster.  */
1372          if (DF_REF_ID (use) < df->use_id_save)
1373            continue;
1374
1375	  df->regs[uregno].uses
1376	    = df_link_create (use, df->regs[uregno].uses);
1377	}
1378    }
1379}
1380
1381
1382/* Create reg-use chains for each basic block within BLOCKS.  These
1383   are a list of uses for each register.  If REDO is true, remove the
1384   old reg-use chains first, otherwise just add new uses to them.  */
1385
1386static void
1387df_reg_use_chain_create (struct df *df, bitmap blocks, bool redo)
1388{
1389  basic_block bb;
1390#ifdef ENABLE_CHECKING
1391  unsigned regno;
1392#endif
1393  unsigned old_use_id_save = df->use_id_save;
1394
1395  if (redo)
1396    {
1397#ifdef ENABLE_CHECKING
1398      for (regno = 0; regno < df->n_regs; regno++)
1399	gcc_assert (!df->regs[regno].uses);
1400#endif
1401
1402      /* Pretend that all uses are new.  */
1403      df->use_id_save = 0;
1404    }
1405
1406  FOR_EACH_BB_IN_BITMAP (blocks, 0, bb,
1407    {
1408      df_bb_reg_use_chain_create (df, bb);
1409    });
1410
1411  df->use_id_save = old_use_id_save;
1412}
1413
1414/* Remove all reg-use chains stored in the dataflow object DF.  */
1415
1416static void
1417df_reg_use_chain_clean (struct df *df)
1418{
1419  unsigned regno;
1420
1421  for (regno = 0; regno < df->n_regs; regno++)
1422    free_reg_ref_chain (&df->regs[regno].uses);
1423}
1424
1425/* Create def-use chains from reaching use bitmaps for basic block BB.  */
1426static void
1427df_bb_du_chain_create (struct df *df, basic_block bb, bitmap ru)
1428{
1429  struct bb_info *bb_info = DF_BB_INFO (df, bb);
1430  rtx insn;
1431
1432  bitmap_copy (ru, bb_info->ru_out);
1433
1434  /* For each def in BB create a linked list (chain) of uses
1435     reached from the def.  */
1436  FOR_BB_INSNS_REVERSE (bb, insn)
1437    {
1438      struct df_link *def_link;
1439      struct df_link *use_link;
1440      unsigned int uid = INSN_UID (insn);
1441
1442      if (! INSN_P (insn))
1443	continue;
1444
1445      /* For each def in insn...  */
1446      for (def_link = df->insns[uid].defs; def_link; def_link = def_link->next)
1447	{
1448	  struct ref *def = def_link->ref;
1449	  unsigned int dregno = DF_REF_REGNO (def);
1450
1451	  DF_REF_CHAIN (def) = 0;
1452
1453	  /* While the reg-use chains are not essential, it
1454	     is _much_ faster to search these short lists rather
1455	     than all the reaching uses, especially for large functions.  */
1456	  for (use_link = df->regs[dregno].uses; use_link;
1457	       use_link = use_link->next)
1458	    {
1459	      struct ref *use = use_link->ref;
1460
1461	      if (bitmap_bit_p (ru, DF_REF_ID (use)))
1462		{
1463		  DF_REF_CHAIN (def)
1464		    = df_link_create (use, DF_REF_CHAIN (def));
1465
1466		  bitmap_clear_bit (ru, DF_REF_ID (use));
1467		}
1468	    }
1469	}
1470
1471      /* For each use in insn...  */
1472      for (use_link = df->insns[uid].uses; use_link; use_link = use_link->next)
1473	{
1474	  struct ref *use = use_link->ref;
1475	  bitmap_set_bit (ru, DF_REF_ID (use));
1476	}
1477    }
1478}
1479
1480
1481/* Create def-use chains from reaching use bitmaps for basic blocks
1482   in BLOCKS.  */
1483static void
1484df_du_chain_create (struct df *df, bitmap blocks)
1485{
1486  bitmap ru;
1487  basic_block bb;
1488
1489  ru = BITMAP_ALLOC (NULL);
1490
1491  FOR_EACH_BB_IN_BITMAP (blocks, 0, bb,
1492    {
1493      df_bb_du_chain_create (df, bb, ru);
1494    });
1495
1496  BITMAP_FREE (ru);
1497}
1498
1499
1500/* Create use-def chains from reaching def bitmaps for basic block BB.  */
1501static void
1502df_bb_ud_chain_create (struct df *df, basic_block bb)
1503{
1504  struct bb_info *bb_info = DF_BB_INFO (df, bb);
1505  struct ref **reg_def_last = df->reg_def_last;
1506  rtx insn;
1507
1508  memset (reg_def_last, 0, df->n_regs * sizeof (struct ref *));
1509
1510  /* For each use in BB create a linked list (chain) of defs
1511     that reach the use.  */
1512  FOR_BB_INSNS (bb, insn)
1513    {
1514      unsigned int uid = INSN_UID (insn);
1515      struct df_link *use_link;
1516      struct df_link *def_link;
1517
1518      if (! INSN_P (insn))
1519	continue;
1520
1521      /* For each use in insn...  */
1522      for (use_link = df->insns[uid].uses; use_link; use_link = use_link->next)
1523	{
1524	  struct ref *use = use_link->ref;
1525	  unsigned int regno = DF_REF_REGNO (use);
1526
1527	  DF_REF_CHAIN (use) = 0;
1528
1529	  /* Has regno been defined in this BB yet?  If so, use
1530	     the last def as the single entry for the use-def
1531	     chain for this use.  Otherwise, we need to add all
1532	     the defs using this regno that reach the start of
1533	     this BB.  */
1534	  if (reg_def_last[regno])
1535	    {
1536	      DF_REF_CHAIN (use)
1537		= df_link_create (reg_def_last[regno], 0);
1538	    }
1539	  else
1540	    {
1541	      /* While the reg-def chains are not essential, it is
1542		 _much_ faster to search these short lists rather than
1543		 all the reaching defs, especially for large
1544		 functions.  */
1545	      for (def_link = df->regs[regno].defs; def_link;
1546		   def_link = def_link->next)
1547		{
1548		  struct ref *def = def_link->ref;
1549
1550		  if (bitmap_bit_p (bb_info->rd_in, DF_REF_ID (def)))
1551		    {
1552		      DF_REF_CHAIN (use)
1553			= df_link_create (def, DF_REF_CHAIN (use));
1554		    }
1555		}
1556	    }
1557	}
1558
1559
1560      /* For each def in insn... record the last def of each reg.  */
1561      for (def_link = df->insns[uid].defs; def_link; def_link = def_link->next)
1562	{
1563	  struct ref *def = def_link->ref;
1564	  int dregno = DF_REF_REGNO (def);
1565
1566	  reg_def_last[dregno] = def;
1567	}
1568    }
1569}
1570
1571
1572/* Create use-def chains from reaching def bitmaps for basic blocks
1573   within BLOCKS.  */
1574static void
1575df_ud_chain_create (struct df *df, bitmap blocks)
1576{
1577  basic_block bb;
1578
1579  FOR_EACH_BB_IN_BITMAP (blocks, 0, bb,
1580    {
1581      df_bb_ud_chain_create (df, bb);
1582    });
1583}
1584
1585
1586
1587static void
1588df_rd_transfer_function (int bb ATTRIBUTE_UNUSED, int *changed, void *in,
1589			 void *out, void *gen, void *kill,
1590			 void *data ATTRIBUTE_UNUSED)
1591{
1592  *changed = bitmap_ior_and_compl (out, gen, in, kill);
1593}
1594
1595
1596static void
1597df_ru_transfer_function (int bb ATTRIBUTE_UNUSED, int *changed, void *in,
1598			 void *out, void *gen, void *kill,
1599			 void *data ATTRIBUTE_UNUSED)
1600{
1601  *changed = bitmap_ior_and_compl (in, gen, out, kill);
1602}
1603
1604
1605static void
1606df_lr_transfer_function (int bb ATTRIBUTE_UNUSED, int *changed, void *in,
1607			 void *out, void *use, void *def,
1608			 void *data ATTRIBUTE_UNUSED)
1609{
1610  *changed = bitmap_ior_and_compl (in, use, out, def);
1611}
1612
1613
1614/* Compute local reaching def info for basic block BB.  */
1615static void
1616df_bb_rd_local_compute (struct df *df, basic_block bb, bitmap call_killed_defs)
1617{
1618  struct bb_info *bb_info = DF_BB_INFO (df, bb);
1619  rtx insn;
1620  bitmap seen = BITMAP_ALLOC (NULL);
1621  bool call_seen = false;
1622
1623  FOR_BB_INSNS_REVERSE (bb, insn)
1624    {
1625      unsigned int uid = INSN_UID (insn);
1626      struct df_link *def_link;
1627
1628      if (! INSN_P (insn))
1629	continue;
1630
1631      for (def_link = df->insns[uid].defs; def_link; def_link = def_link->next)
1632	{
1633	  struct ref *def = def_link->ref;
1634	  unsigned int regno = DF_REF_REGNO (def);
1635	  struct df_link *def2_link;
1636
1637	  if (bitmap_bit_p (seen, regno)
1638	      || (call_seen
1639		  && regno < FIRST_PSEUDO_REGISTER
1640		  && TEST_HARD_REG_BIT (regs_invalidated_by_call, regno)))
1641	    continue;
1642
1643	  for (def2_link = df->regs[regno].defs; def2_link;
1644	       def2_link = def2_link->next)
1645	    {
1646	      struct ref *def2 = def2_link->ref;
1647
1648	      /* Add all defs of this reg to the set of kills.  This
1649		 is greedy since many of these defs will not actually
1650		 be killed by this BB but it keeps things a lot
1651		 simpler.  */
1652	      bitmap_set_bit (bb_info->rd_kill, DF_REF_ID (def2));
1653	    }
1654
1655	  bitmap_set_bit (bb_info->rd_gen, DF_REF_ID (def));
1656	  bitmap_set_bit (seen, regno);
1657	}
1658
1659      if (CALL_P (insn) && (df->flags & DF_HARD_REGS))
1660	{
1661	  bitmap_ior_into (bb_info->rd_kill, call_killed_defs);
1662	  call_seen = 1;
1663	}
1664    }
1665
1666  BITMAP_FREE (seen);
1667}
1668
1669
1670/* Compute local reaching def info for each basic block within BLOCKS.  */
1671static void
1672df_rd_local_compute (struct df *df, bitmap blocks)
1673{
1674  basic_block bb;
1675  bitmap killed_by_call = NULL;
1676  unsigned regno;
1677  struct df_link *def_link;
1678
1679  if (df->flags & DF_HARD_REGS)
1680    {
1681      killed_by_call = BITMAP_ALLOC (NULL);
1682      for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
1683	{
1684	  if (!TEST_HARD_REG_BIT (regs_invalidated_by_call, regno))
1685	    continue;
1686
1687	  for (def_link = df->regs[regno].defs;
1688	       def_link;
1689	       def_link = def_link->next)
1690	    bitmap_set_bit (killed_by_call, DF_REF_ID (def_link->ref));
1691	}
1692    }
1693
1694  FOR_EACH_BB_IN_BITMAP (blocks, 0, bb,
1695  {
1696    df_bb_rd_local_compute (df, bb, killed_by_call);
1697  });
1698
1699  if (df->flags & DF_HARD_REGS)
1700    BITMAP_FREE (killed_by_call);
1701}
1702
1703
1704/* Compute local reaching use (upward exposed use) info for basic
1705   block BB.  */
1706static void
1707df_bb_ru_local_compute (struct df *df, basic_block bb)
1708{
1709  /* This is much more tricky than computing reaching defs.  With
1710     reaching defs, defs get killed by other defs.  With upwards
1711     exposed uses, these get killed by defs with the same regno.  */
1712
1713  struct bb_info *bb_info = DF_BB_INFO (df, bb);
1714  rtx insn;
1715
1716
1717  FOR_BB_INSNS_REVERSE (bb, insn)
1718    {
1719      unsigned int uid = INSN_UID (insn);
1720      struct df_link *def_link;
1721      struct df_link *use_link;
1722
1723      if (! INSN_P (insn))
1724	continue;
1725
1726      for (def_link = df->insns[uid].defs; def_link; def_link = def_link->next)
1727	{
1728	  struct ref *def = def_link->ref;
1729	  unsigned int dregno = DF_REF_REGNO (def);
1730
1731	  for (use_link = df->regs[dregno].uses; use_link;
1732	       use_link = use_link->next)
1733	    {
1734	      struct ref *use = use_link->ref;
1735
1736	      /* Add all uses of this reg to the set of kills.  This
1737		 is greedy since many of these uses will not actually
1738		 be killed by this BB but it keeps things a lot
1739		 simpler.  */
1740	      bitmap_set_bit (bb_info->ru_kill, DF_REF_ID (use));
1741
1742	      /* Zap from the set of gens for this BB.  */
1743	      bitmap_clear_bit (bb_info->ru_gen, DF_REF_ID (use));
1744	    }
1745	}
1746
1747      for (use_link = df->insns[uid].uses; use_link; use_link = use_link->next)
1748	{
1749	  struct ref *use = use_link->ref;
1750	  /* Add use to set of gens in this BB.  */
1751	  bitmap_set_bit (bb_info->ru_gen, DF_REF_ID (use));
1752	}
1753    }
1754}
1755
1756
1757/* Compute local reaching use (upward exposed use) info for each basic
1758   block within BLOCKS.  */
1759static void
1760df_ru_local_compute (struct df *df, bitmap blocks)
1761{
1762  basic_block bb;
1763
1764  FOR_EACH_BB_IN_BITMAP (blocks, 0, bb,
1765  {
1766    df_bb_ru_local_compute (df, bb);
1767  });
1768}
1769
1770
1771/* Compute local live variable info for basic block BB.  */
1772static void
1773df_bb_lr_local_compute (struct df *df, basic_block bb)
1774{
1775  struct bb_info *bb_info = DF_BB_INFO (df, bb);
1776  rtx insn;
1777
1778  FOR_BB_INSNS_REVERSE (bb, insn)
1779    {
1780      unsigned int uid = INSN_UID (insn);
1781      struct df_link *link;
1782
1783      if (! INSN_P (insn))
1784	continue;
1785
1786      for (link = df->insns[uid].defs; link; link = link->next)
1787	{
1788	  struct ref *def = link->ref;
1789	  unsigned int dregno = DF_REF_REGNO (def);
1790
1791	  /* Add def to set of defs in this BB.  */
1792	  bitmap_set_bit (bb_info->lr_def, dregno);
1793
1794	  bitmap_clear_bit (bb_info->lr_use, dregno);
1795	}
1796
1797      for (link = df->insns[uid].uses; link; link = link->next)
1798	{
1799	  struct ref *use = link->ref;
1800	  /* Add use to set of uses in this BB.  */
1801	  bitmap_set_bit (bb_info->lr_use, DF_REF_REGNO (use));
1802	}
1803    }
1804}
1805
1806
1807/* Compute local live variable info for each basic block within BLOCKS.  */
1808static void
1809df_lr_local_compute (struct df *df, bitmap blocks)
1810{
1811  basic_block bb;
1812
1813  FOR_EACH_BB_IN_BITMAP (blocks, 0, bb,
1814  {
1815    df_bb_lr_local_compute (df, bb);
1816  });
1817}
1818
1819
1820/* Compute register info: lifetime, bb, and number of defs and uses
1821   for basic block BB.  */
1822static void
1823df_bb_reg_info_compute (struct df *df, basic_block bb, bitmap live)
1824{
1825  struct reg_info *reg_info = df->regs;
1826  struct bb_info *bb_info = DF_BB_INFO (df, bb);
1827  rtx insn;
1828
1829  bitmap_copy (live, bb_info->lr_out);
1830
1831  FOR_BB_INSNS_REVERSE (bb, insn)
1832    {
1833      unsigned int uid = INSN_UID (insn);
1834      unsigned int regno;
1835      struct df_link *link;
1836      bitmap_iterator bi;
1837
1838      if (! INSN_P (insn))
1839	continue;
1840
1841      for (link = df->insns[uid].defs; link; link = link->next)
1842	{
1843	  struct ref *def = link->ref;
1844	  unsigned int dregno = DF_REF_REGNO (def);
1845
1846	  /* Kill this register.  */
1847	  bitmap_clear_bit (live, dregno);
1848	  reg_info[dregno].n_defs++;
1849	}
1850
1851      for (link = df->insns[uid].uses; link; link = link->next)
1852	{
1853	  struct ref *use = link->ref;
1854	  unsigned int uregno = DF_REF_REGNO (use);
1855
1856	  /* This register is now live.  */
1857	  bitmap_set_bit (live, uregno);
1858	  reg_info[uregno].n_uses++;
1859	}
1860
1861      /* Increment lifetimes of all live registers.  */
1862      EXECUTE_IF_SET_IN_BITMAP (live, 0, regno, bi)
1863	{
1864	  reg_info[regno].lifetime++;
1865	}
1866    }
1867}
1868
1869
1870/* Compute register info: lifetime, bb, and number of defs and uses.  */
1871static void
1872df_reg_info_compute (struct df *df, bitmap blocks)
1873{
1874  basic_block bb;
1875  bitmap live;
1876
1877  live = BITMAP_ALLOC (NULL);
1878
1879  FOR_EACH_BB_IN_BITMAP (blocks, 0, bb,
1880  {
1881    df_bb_reg_info_compute (df, bb, live);
1882  });
1883
1884  BITMAP_FREE (live);
1885}
1886
1887
1888/* Assign LUIDs for BB.  */
1889static int
1890df_bb_luids_set (struct df *df, basic_block bb)
1891{
1892  rtx insn;
1893  int luid = 0;
1894
1895  /* The LUIDs are monotonically increasing for each basic block.  */
1896
1897  FOR_BB_INSNS (bb, insn)
1898    {
1899      if (INSN_P (insn))
1900	DF_INSN_LUID (df, insn) = luid++;
1901      DF_INSN_LUID (df, insn) = luid;
1902    }
1903  return luid;
1904}
1905
1906
1907/* Assign LUIDs for each basic block within BLOCKS.  */
1908static int
1909df_luids_set (struct df *df, bitmap blocks)
1910{
1911  basic_block bb;
1912  int total = 0;
1913
1914  FOR_EACH_BB_IN_BITMAP (blocks, 0, bb,
1915    {
1916      total += df_bb_luids_set (df, bb);
1917    });
1918  return total;
1919}
1920
1921
1922/* Perform dataflow analysis using existing DF structure for blocks
1923   within BLOCKS.  If BLOCKS is zero, use all basic blocks in the CFG.  */
1924static void
1925df_analyze_1 (struct df *df, bitmap blocks, int flags, int update)
1926{
1927  int aflags;
1928  int dflags;
1929  int i;
1930  basic_block bb;
1931  struct dataflow dflow;
1932
1933  dflags = 0;
1934  aflags = flags;
1935  if (flags & DF_UD_CHAIN)
1936    aflags |= DF_RD | DF_RD_CHAIN;
1937
1938  if (flags & DF_DU_CHAIN)
1939    aflags |= DF_RU;
1940
1941  if (flags & DF_RU)
1942    aflags |= DF_RU_CHAIN;
1943
1944  if (flags & DF_REG_INFO)
1945    aflags |= DF_LR;
1946
1947  if (! blocks)
1948    blocks = df->all_blocks;
1949
1950  df->flags = flags;
1951  if (update)
1952    {
1953      df_refs_update (df, NULL);
1954      /* More fine grained incremental dataflow analysis would be
1955	 nice.  For now recompute the whole shebang for the
1956	 modified blocks.  */
1957#if 0
1958      df_refs_unlink (df, blocks);
1959#endif
1960      /* All the def-use, use-def chains can be potentially
1961	 modified by changes in one block.  The size of the
1962	 bitmaps can also change.  */
1963    }
1964  else
1965    {
1966      /* Scan the function for all register defs and uses.  */
1967      df_refs_queue (df);
1968      df_refs_record (df, blocks);
1969
1970      /* Link all the new defs and uses to the insns.  */
1971      df_refs_process (df);
1972    }
1973
1974  /* Allocate the bitmaps now the total number of defs and uses are
1975     known.  If the number of defs or uses have changed, then
1976     these bitmaps need to be reallocated.  */
1977  df_bitmaps_alloc (df, NULL, aflags);
1978
1979  /* Set the LUIDs for each specified basic block.  */
1980  df_luids_set (df, blocks);
1981
1982  /* Recreate reg-def and reg-use chains from scratch so that first
1983     def is at the head of the reg-def chain and the last use is at
1984     the head of the reg-use chain.  This is only important for
1985     regs local to a basic block as it speeds up searching.  */
1986  if (aflags & DF_RD_CHAIN)
1987    {
1988      df_reg_def_chain_create (df, blocks, false);
1989    }
1990
1991  if (aflags & DF_RU_CHAIN)
1992    {
1993      df_reg_use_chain_create (df, blocks, false);
1994    }
1995
1996  df->dfs_order = xmalloc (sizeof (int) * n_basic_blocks);
1997  df->rc_order = xmalloc (sizeof (int) * n_basic_blocks);
1998  df->rts_order = xmalloc (sizeof (int) * n_basic_blocks);
1999  df->inverse_dfs_map = xmalloc (sizeof (int) * last_basic_block);
2000  df->inverse_rc_map = xmalloc (sizeof (int) * last_basic_block);
2001  df->inverse_rts_map = xmalloc (sizeof (int) * last_basic_block);
2002
2003  flow_depth_first_order_compute (df->dfs_order, df->rc_order);
2004  flow_reverse_top_sort_order_compute (df->rts_order);
2005  for (i = 0; i < n_basic_blocks; i++)
2006    {
2007      df->inverse_dfs_map[df->dfs_order[i]] = i;
2008      df->inverse_rc_map[df->rc_order[i]] = i;
2009      df->inverse_rts_map[df->rts_order[i]] = i;
2010    }
2011  if (aflags & DF_RD)
2012    {
2013      /* Compute the sets of gens and kills for the defs of each bb.  */
2014      dflow.in = xmalloc (sizeof (bitmap) * last_basic_block);
2015      dflow.out = xmalloc (sizeof (bitmap) * last_basic_block);
2016      dflow.gen = xmalloc (sizeof (bitmap) * last_basic_block);
2017      dflow.kill = xmalloc (sizeof (bitmap) * last_basic_block);
2018
2019      df_rd_local_compute (df, df->flags & DF_RD ? blocks : df->all_blocks);
2020      FOR_EACH_BB (bb)
2021	{
2022	  dflow.in[bb->index] = DF_BB_INFO (df, bb)->rd_in;
2023	  dflow.out[bb->index] = DF_BB_INFO (df, bb)->rd_out;
2024	  dflow.gen[bb->index] = DF_BB_INFO (df, bb)->rd_gen;
2025	  dflow.kill[bb->index] = DF_BB_INFO (df, bb)->rd_kill;
2026	}
2027
2028      dflow.repr = SR_BITMAP;
2029      dflow.dir = DF_FORWARD;
2030      dflow.conf_op = DF_UNION;
2031      dflow.transfun = df_rd_transfer_function;
2032      dflow.n_blocks = n_basic_blocks;
2033      dflow.order = df->rc_order;
2034      dflow.data = NULL;
2035
2036      iterative_dataflow (&dflow);
2037      free (dflow.in);
2038      free (dflow.out);
2039      free (dflow.gen);
2040      free (dflow.kill);
2041    }
2042
2043  if (aflags & DF_UD_CHAIN)
2044    {
2045      /* Create use-def chains.  */
2046      df_ud_chain_create (df, df->all_blocks);
2047
2048      if (! (flags & DF_RD))
2049	dflags |= DF_RD;
2050    }
2051
2052  if (aflags & DF_RU)
2053    {
2054      /* Compute the sets of gens and kills for the upwards exposed
2055	 uses in each bb.  */
2056      dflow.in = xmalloc (sizeof (bitmap) * last_basic_block);
2057      dflow.out = xmalloc (sizeof (bitmap) * last_basic_block);
2058      dflow.gen = xmalloc (sizeof (bitmap) * last_basic_block);
2059      dflow.kill = xmalloc (sizeof (bitmap) * last_basic_block);
2060
2061      df_ru_local_compute (df, df->flags & DF_RU ? blocks : df->all_blocks);
2062
2063      FOR_EACH_BB (bb)
2064	{
2065	  dflow.in[bb->index] = DF_BB_INFO (df, bb)->ru_in;
2066	  dflow.out[bb->index] = DF_BB_INFO (df, bb)->ru_out;
2067	  dflow.gen[bb->index] = DF_BB_INFO (df, bb)->ru_gen;
2068	  dflow.kill[bb->index] = DF_BB_INFO (df, bb)->ru_kill;
2069	}
2070
2071      dflow.repr = SR_BITMAP;
2072      dflow.dir = DF_BACKWARD;
2073      dflow.conf_op = DF_UNION;
2074      dflow.transfun = df_ru_transfer_function;
2075      dflow.n_blocks = n_basic_blocks;
2076      dflow.order = df->rts_order;
2077      dflow.data = NULL;
2078
2079      iterative_dataflow (&dflow);
2080      free (dflow.in);
2081      free (dflow.out);
2082      free (dflow.gen);
2083      free (dflow.kill);
2084    }
2085
2086  if (aflags & DF_DU_CHAIN)
2087    {
2088      /* Create def-use chains.  */
2089      df_du_chain_create (df, df->all_blocks);
2090
2091      if (! (flags & DF_RU))
2092	dflags |= DF_RU;
2093    }
2094
2095  /* Free up bitmaps that are no longer required.  */
2096  if (dflags)
2097    df_bitmaps_free (df, dflags);
2098
2099  if (aflags & DF_LR)
2100    {
2101      /* Compute the sets of defs and uses of live variables.  */
2102      dflow.in = xmalloc (sizeof (bitmap) * last_basic_block);
2103      dflow.out = xmalloc (sizeof (bitmap) * last_basic_block);
2104      dflow.gen = xmalloc (sizeof (bitmap) * last_basic_block);
2105      dflow.kill = xmalloc (sizeof (bitmap) * last_basic_block);
2106
2107      df_lr_local_compute (df, df->flags & DF_LR ? blocks : df->all_blocks);
2108
2109      FOR_EACH_BB (bb)
2110	{
2111	  dflow.in[bb->index] = DF_BB_INFO (df, bb)->lr_in;
2112	  dflow.out[bb->index] = DF_BB_INFO (df, bb)->lr_out;
2113	  dflow.gen[bb->index] = DF_BB_INFO (df, bb)->lr_use;
2114	  dflow.kill[bb->index] = DF_BB_INFO (df, bb)->lr_def;
2115	}
2116
2117      dflow.repr = SR_BITMAP;
2118      dflow.dir = DF_BACKWARD;
2119      dflow.conf_op = DF_UNION;
2120      dflow.transfun = df_lr_transfer_function;
2121      dflow.n_blocks = n_basic_blocks;
2122      dflow.order = df->rts_order;
2123      dflow.data = NULL;
2124
2125      iterative_dataflow (&dflow);
2126      free (dflow.in);
2127      free (dflow.out);
2128      free (dflow.gen);
2129      free (dflow.kill);
2130    }
2131
2132  if (aflags & DF_REG_INFO)
2133    {
2134      df_reg_info_compute (df, df->all_blocks);
2135    }
2136
2137  free (df->dfs_order);
2138  free (df->rc_order);
2139  free (df->rts_order);
2140  free (df->inverse_rc_map);
2141  free (df->inverse_dfs_map);
2142  free (df->inverse_rts_map);
2143}
2144
2145
2146/* Initialize dataflow analysis.  */
2147struct df *
2148df_init (void)
2149{
2150  struct df *df;
2151
2152  df = xcalloc (1, sizeof (struct df));
2153
2154  /* Squirrel away a global for debugging.  */
2155  ddf = df;
2156
2157  return df;
2158}
2159
2160
2161/* Start queuing refs.  */
2162static int
2163df_refs_queue (struct df *df)
2164{
2165  df->def_id_save = df->def_id;
2166  df->use_id_save = df->use_id;
2167  /* ???? Perhaps we should save current obstack state so that we can
2168     unwind it.  */
2169  return 0;
2170}
2171
2172
2173/* Process queued refs.  */
2174static int
2175df_refs_process (struct df *df)
2176{
2177  unsigned int i;
2178
2179  /* Build new insn-def chains.  */
2180  for (i = df->def_id_save; i != df->def_id; i++)
2181    {
2182      struct ref *def = df->defs[i];
2183      unsigned int uid = DF_REF_INSN_UID (def);
2184
2185      /* Add def to head of def list for INSN.  */
2186      df->insns[uid].defs
2187	= df_link_create (def, df->insns[uid].defs);
2188    }
2189
2190  /* Build new insn-use chains.  */
2191  for (i = df->use_id_save; i != df->use_id; i++)
2192    {
2193      struct ref *use = df->uses[i];
2194      unsigned int uid = DF_REF_INSN_UID (use);
2195
2196      /* Add use to head of use list for INSN.  */
2197      df->insns[uid].uses
2198	= df_link_create (use, df->insns[uid].uses);
2199    }
2200  return 0;
2201}
2202
2203
2204/* Update refs for basic block BB.  */
2205static int
2206df_bb_refs_update (struct df *df, basic_block bb)
2207{
2208  rtx insn;
2209  int count = 0;
2210
2211  /* While we have to scan the chain of insns for this BB, we do not
2212     need to allocate and queue a long chain of BB/INSN pairs.  Using
2213     a bitmap for insns_modified saves memory and avoids queuing
2214     duplicates.  */
2215
2216  FOR_BB_INSNS (bb, insn)
2217    {
2218      unsigned int uid;
2219
2220      uid = INSN_UID (insn);
2221
2222      if (bitmap_bit_p (df->insns_modified, uid))
2223	{
2224	  /* Delete any allocated refs of this insn.  MPH,  FIXME.  */
2225	  df_insn_refs_unlink (df, bb, insn);
2226
2227	  /* Scan the insn for refs.  */
2228	  df_insn_refs_record (df, bb, insn);
2229
2230	  count++;
2231	}
2232    }
2233  return count;
2234}
2235
2236
2237/* Process all the modified/deleted insns that were queued.  */
2238static int
2239df_refs_update (struct df *df, bitmap blocks)
2240{
2241  basic_block bb;
2242  unsigned count = 0, bbno;
2243
2244  df->n_regs = max_reg_num ();
2245  if (df->n_regs >= df->reg_size)
2246    df_reg_table_realloc (df, 0);
2247
2248  df_refs_queue (df);
2249
2250  if (!blocks)
2251    {
2252      FOR_EACH_BB_IN_BITMAP (df->bbs_modified, 0, bb,
2253	{
2254	  count += df_bb_refs_update (df, bb);
2255	});
2256    }
2257  else
2258    {
2259      bitmap_iterator bi;
2260
2261      EXECUTE_IF_AND_IN_BITMAP (df->bbs_modified, blocks, 0, bbno, bi)
2262	{
2263	  count += df_bb_refs_update (df, BASIC_BLOCK (bbno));
2264	}
2265    }
2266
2267  df_refs_process (df);
2268  return count;
2269}
2270
2271
2272/* Return nonzero if any of the requested blocks in the bitmap
2273   BLOCKS have been modified.  */
2274static int
2275df_modified_p (struct df *df, bitmap blocks)
2276{
2277  int update = 0;
2278  basic_block bb;
2279
2280  if (!df->n_bbs)
2281    return 0;
2282
2283  FOR_EACH_BB (bb)
2284    if (bitmap_bit_p (df->bbs_modified, bb->index)
2285	&& (! blocks || (blocks == (bitmap) -1) || bitmap_bit_p (blocks, bb->index)))
2286    {
2287      update = 1;
2288      break;
2289    }
2290
2291  return update;
2292}
2293
2294/* Analyze dataflow info for the basic blocks specified by the bitmap
2295   BLOCKS, or for the whole CFG if BLOCKS is zero, or just for the
2296   modified blocks if BLOCKS is -1.  */
2297
2298int
2299df_analyze (struct df *df, bitmap blocks, int flags)
2300{
2301  int update;
2302
2303  /* We could deal with additional basic blocks being created by
2304     rescanning everything again.  */
2305  gcc_assert (!df->n_bbs || df->n_bbs == (unsigned int) last_basic_block);
2306
2307  update = df_modified_p (df, blocks);
2308  if (update || (flags != df->flags))
2309    {
2310      if (! blocks)
2311	{
2312	  if (df->n_bbs)
2313	    {
2314	      /* Recompute everything from scratch.  */
2315	      df_free (df);
2316	    }
2317	  /* Allocate and initialize data structures.  */
2318	  df_alloc (df, max_reg_num ());
2319	  df_analyze_1 (df, 0, flags, 0);
2320	  update = 1;
2321	}
2322      else
2323	{
2324	  if (blocks == (bitmap) -1)
2325	    blocks = df->bbs_modified;
2326
2327	  gcc_assert (df->n_bbs);
2328
2329	  df_analyze_1 (df, blocks, flags, 1);
2330	  bitmap_zero (df->bbs_modified);
2331	  bitmap_zero (df->insns_modified);
2332	}
2333    }
2334  return update;
2335}
2336
2337/* Remove the entries not in BLOCKS from the LIST of length LEN, preserving
2338   the order of the remaining entries.  Returns the length of the resulting
2339   list.  */
2340
2341static unsigned
2342prune_to_subcfg (int list[], unsigned len, bitmap blocks)
2343{
2344  unsigned act, last;
2345
2346  for (act = 0, last = 0; act < len; act++)
2347    if (bitmap_bit_p (blocks, list[act]))
2348      list[last++] = list[act];
2349
2350  return last;
2351}
2352
2353/* Alternative entry point to the analysis.  Analyze just the part of the cfg
2354   graph induced by BLOCKS.
2355
2356   TODO I am not quite sure how to avoid code duplication with df_analyze_1
2357   here, and simultaneously not make even greater chaos in it.  We behave
2358   slightly differently in some details, especially in handling modified
2359   insns.  */
2360
2361void
2362df_analyze_subcfg (struct df *df, bitmap blocks, int flags)
2363{
2364  rtx insn;
2365  basic_block bb;
2366  struct dataflow dflow;
2367  unsigned n_blocks;
2368
2369  if (flags & DF_UD_CHAIN)
2370    flags |= DF_RD | DF_RD_CHAIN;
2371  if (flags & DF_DU_CHAIN)
2372    flags |= DF_RU;
2373  if (flags & DF_RU)
2374    flags |= DF_RU_CHAIN;
2375  if (flags & DF_REG_INFO)
2376    flags |= DF_LR;
2377
2378  if (!df->n_bbs)
2379    {
2380      df_alloc (df, max_reg_num ());
2381
2382      /* Mark all insns as modified.  */
2383
2384      FOR_EACH_BB (bb)
2385	{
2386	  FOR_BB_INSNS (bb, insn)
2387	    {
2388	      df_insn_modify (df, bb, insn);
2389	    }
2390	}
2391    }
2392
2393  df->flags = flags;
2394
2395  df_reg_def_chain_clean (df);
2396  df_reg_use_chain_clean (df);
2397
2398  df_refs_update (df, blocks);
2399
2400  /* Clear the updated stuff from ``modified'' bitmaps.  */
2401  FOR_EACH_BB_IN_BITMAP (blocks, 0, bb,
2402    {
2403      if (bitmap_bit_p (df->bbs_modified, bb->index))
2404	{
2405	  FOR_BB_INSNS (bb, insn)
2406	    {
2407	      bitmap_clear_bit (df->insns_modified, INSN_UID (insn));
2408	    }
2409
2410	  bitmap_clear_bit (df->bbs_modified, bb->index);
2411	}
2412    });
2413
2414  /* Allocate the bitmaps now the total number of defs and uses are
2415     known.  If the number of defs or uses have changed, then
2416     these bitmaps need to be reallocated.  */
2417  df_bitmaps_alloc (df, blocks, flags);
2418
2419  /* Set the LUIDs for each specified basic block.  */
2420  df_luids_set (df, blocks);
2421
2422  /* Recreate reg-def and reg-use chains from scratch so that first
2423     def is at the head of the reg-def chain and the last use is at
2424     the head of the reg-use chain.  This is only important for
2425     regs local to a basic block as it speeds up searching.  */
2426  if (flags & DF_RD_CHAIN)
2427    {
2428      df_reg_def_chain_create (df, blocks, true);
2429    }
2430
2431  if (flags & DF_RU_CHAIN)
2432    {
2433      df_reg_use_chain_create (df, blocks, true);
2434    }
2435
2436  df->dfs_order = xmalloc (sizeof (int) * n_basic_blocks);
2437  df->rc_order = xmalloc (sizeof (int) * n_basic_blocks);
2438  df->rts_order = xmalloc (sizeof (int) * n_basic_blocks);
2439
2440  flow_depth_first_order_compute (df->dfs_order, df->rc_order);
2441  flow_reverse_top_sort_order_compute (df->rts_order);
2442
2443  n_blocks = prune_to_subcfg (df->dfs_order, n_basic_blocks, blocks);
2444  prune_to_subcfg (df->rc_order, n_basic_blocks, blocks);
2445  prune_to_subcfg (df->rts_order, n_basic_blocks, blocks);
2446
2447  dflow.in = xmalloc (sizeof (bitmap) * last_basic_block);
2448  dflow.out = xmalloc (sizeof (bitmap) * last_basic_block);
2449  dflow.gen = xmalloc (sizeof (bitmap) * last_basic_block);
2450  dflow.kill = xmalloc (sizeof (bitmap) * last_basic_block);
2451
2452  if (flags & DF_RD)
2453    {
2454      /* Compute the sets of gens and kills for the defs of each bb.  */
2455      df_rd_local_compute (df, blocks);
2456
2457      FOR_EACH_BB_IN_BITMAP (blocks, 0, bb,
2458	{
2459	  dflow.in[bb->index] = DF_BB_INFO (df, bb)->rd_in;
2460	  dflow.out[bb->index] = DF_BB_INFO (df, bb)->rd_out;
2461	  dflow.gen[bb->index] = DF_BB_INFO (df, bb)->rd_gen;
2462	  dflow.kill[bb->index] = DF_BB_INFO (df, bb)->rd_kill;
2463	});
2464
2465      dflow.repr = SR_BITMAP;
2466      dflow.dir = DF_FORWARD;
2467      dflow.conf_op = DF_UNION;
2468      dflow.transfun = df_rd_transfer_function;
2469      dflow.n_blocks = n_blocks;
2470      dflow.order = df->rc_order;
2471      dflow.data = NULL;
2472
2473      iterative_dataflow (&dflow);
2474    }
2475
2476  if (flags & DF_UD_CHAIN)
2477    {
2478      /* Create use-def chains.  */
2479      df_ud_chain_create (df, blocks);
2480    }
2481
2482  if (flags & DF_RU)
2483    {
2484      /* Compute the sets of gens and kills for the upwards exposed
2485	 uses in each bb.  */
2486      df_ru_local_compute (df, blocks);
2487
2488      FOR_EACH_BB_IN_BITMAP (blocks, 0, bb,
2489	{
2490	  dflow.in[bb->index] = DF_BB_INFO (df, bb)->ru_in;
2491	  dflow.out[bb->index] = DF_BB_INFO (df, bb)->ru_out;
2492	  dflow.gen[bb->index] = DF_BB_INFO (df, bb)->ru_gen;
2493	  dflow.kill[bb->index] = DF_BB_INFO (df, bb)->ru_kill;
2494	});
2495
2496      dflow.repr = SR_BITMAP;
2497      dflow.dir = DF_BACKWARD;
2498      dflow.conf_op = DF_UNION;
2499      dflow.transfun = df_ru_transfer_function;
2500      dflow.n_blocks = n_blocks;
2501      dflow.order = df->rts_order;
2502      dflow.data = NULL;
2503
2504      iterative_dataflow (&dflow);
2505    }
2506
2507  if (flags & DF_DU_CHAIN)
2508    {
2509      /* Create def-use chains.  */
2510      df_du_chain_create (df, blocks);
2511    }
2512
2513  if (flags & DF_LR)
2514    {
2515      /* Compute the sets of defs and uses of live variables.  */
2516      df_lr_local_compute (df, blocks);
2517
2518      FOR_EACH_BB (bb)
2519	{
2520	  dflow.in[bb->index] = DF_BB_INFO (df, bb)->lr_in;
2521	  dflow.out[bb->index] = DF_BB_INFO (df, bb)->lr_out;
2522	  dflow.gen[bb->index] = DF_BB_INFO (df, bb)->lr_use;
2523	  dflow.kill[bb->index] = DF_BB_INFO (df, bb)->lr_def;
2524	}
2525
2526      dflow.repr = SR_BITMAP;
2527      dflow.dir = DF_BACKWARD;
2528      dflow.conf_op = DF_UNION;
2529      dflow.transfun = df_lr_transfer_function;
2530      dflow.n_blocks = n_blocks;
2531      dflow.order = df->rts_order;
2532      dflow.data = NULL;
2533
2534      iterative_dataflow (&dflow);
2535    }
2536
2537  if (flags & DF_REG_INFO)
2538    {
2539      df_reg_info_compute (df, blocks);
2540    }
2541
2542  free (dflow.in);
2543  free (dflow.out);
2544  free (dflow.gen);
2545  free (dflow.kill);
2546
2547  free (df->dfs_order);
2548  free (df->rc_order);
2549  free (df->rts_order);
2550}
2551
2552/* Free all the dataflow info and the DF structure.  */
2553void
2554df_finish (struct df *df)
2555{
2556  df_free (df);
2557  free (df);
2558}
2559
2560/* Unlink INSN from its reference information.  */
2561static void
2562df_insn_refs_unlink (struct df *df, basic_block bb ATTRIBUTE_UNUSED, rtx insn)
2563{
2564  struct df_link *link;
2565  unsigned int uid;
2566
2567  uid = INSN_UID (insn);
2568
2569  /* Unlink all refs defined by this insn.  */
2570  for (link = df->insns[uid].defs; link; link = link->next)
2571    df_def_unlink (df, link->ref);
2572
2573  /* Unlink all refs used by this insn.  */
2574  for (link = df->insns[uid].uses; link; link = link->next)
2575    df_use_unlink (df, link->ref);
2576
2577  df->insns[uid].defs = 0;
2578  df->insns[uid].uses = 0;
2579}
2580
2581
2582#if 0
2583/* Unlink all the insns within BB from their reference information.  */
2584static void
2585df_bb_refs_unlink (struct df *df, basic_block bb)
2586{
2587  rtx insn;
2588
2589  /* Scan the block an insn at a time from beginning to end.  */
2590  for (insn = BB_HEAD (bb); ; insn = NEXT_INSN (insn))
2591    {
2592      if (INSN_P (insn))
2593	{
2594	  /* Unlink refs for INSN.  */
2595	  df_insn_refs_unlink (df, bb, insn);
2596	}
2597      if (insn == BB_END (bb))
2598	break;
2599    }
2600}
2601
2602
2603/* Unlink all the refs in the basic blocks specified by BLOCKS.
2604   Not currently used.  */
2605static void
2606df_refs_unlink (struct df *df, bitmap blocks)
2607{
2608  basic_block bb;
2609
2610  if (blocks)
2611    {
2612      FOR_EACH_BB_IN_BITMAP (blocks, 0, bb,
2613      {
2614	df_bb_refs_unlink (df, bb);
2615      });
2616    }
2617  else
2618    {
2619      FOR_EACH_BB (bb)
2620	df_bb_refs_unlink (df, bb);
2621    }
2622}
2623#endif
2624
2625/* Functions to modify insns.  */
2626
2627
2628/* Delete INSN and all its reference information.  */
2629rtx
2630df_insn_delete (struct df *df, basic_block bb ATTRIBUTE_UNUSED, rtx insn)
2631{
2632  /* If the insn is a jump, we should perhaps call delete_insn to
2633     handle the JUMP_LABEL?  */
2634
2635  /* We should not be deleting the NOTE_INSN_BASIC_BLOCK or label.  */
2636  gcc_assert (insn != BB_HEAD (bb));
2637
2638  /* Delete the insn.  */
2639  delete_insn (insn);
2640
2641  df_insn_modify (df, bb, insn);
2642
2643  return NEXT_INSN (insn);
2644}
2645
2646/* Mark that basic block BB was modified.  */
2647
2648static void
2649df_bb_modify (struct df *df, basic_block bb)
2650{
2651  if ((unsigned) bb->index >= df->n_bbs)
2652    df_bb_table_realloc (df, df->n_bbs);
2653
2654  bitmap_set_bit (df->bbs_modified, bb->index);
2655}
2656
2657/* Mark that INSN within BB may have changed  (created/modified/deleted).
2658   This may be called multiple times for the same insn.  There is no
2659   harm calling this function if the insn wasn't changed; it will just
2660   slow down the rescanning of refs.  */
2661void
2662df_insn_modify (struct df *df, basic_block bb, rtx insn)
2663{
2664  unsigned int uid;
2665
2666  uid = INSN_UID (insn);
2667  if (uid >= df->insn_size)
2668    df_insn_table_realloc (df, uid);
2669
2670  df_bb_modify (df, bb);
2671  bitmap_set_bit (df->insns_modified, uid);
2672
2673  /* For incremental updating on the fly, perhaps we could make a copy
2674     of all the refs of the original insn and turn them into
2675     anti-refs.  When df_refs_update finds these anti-refs, it annihilates
2676     the original refs.  If validate_change fails then these anti-refs
2677     will just get ignored.  */
2678}
2679
2680/* Check if INSN was marked as changed.  Of course the correctness of
2681   the information depends on whether the instruction was really modified
2682   at the time df_insn_modify was called.  */
2683bool
2684df_insn_modified_p (struct df *df, rtx insn)
2685{
2686  unsigned int uid;
2687
2688  uid = INSN_UID (insn);
2689  return (df->insns_modified
2690	  && uid < df->insn_size
2691          && bitmap_bit_p (df->insns_modified, uid));
2692}
2693
2694typedef struct replace_args
2695{
2696  rtx match;
2697  rtx replacement;
2698  rtx insn;
2699  int modified;
2700} replace_args;
2701
2702
2703/* Replace mem pointed to by PX with its associated pseudo register.
2704   DATA is actually a pointer to a structure describing the
2705   instruction currently being scanned and the MEM we are currently
2706   replacing.  */
2707static int
2708df_rtx_mem_replace (rtx *px, void *data)
2709{
2710  replace_args *args = (replace_args *) data;
2711  rtx mem = *px;
2712
2713  if (mem == NULL_RTX)
2714    return 0;
2715
2716  switch (GET_CODE (mem))
2717    {
2718    case MEM:
2719      break;
2720
2721    case CONST_DOUBLE:
2722      /* We're not interested in the MEM associated with a
2723	 CONST_DOUBLE, so there's no need to traverse into one.  */
2724      return -1;
2725
2726    default:
2727      /* This is not a MEM.  */
2728      return 0;
2729    }
2730
2731  if (!rtx_equal_p (args->match, mem))
2732    /* This is not the MEM we are currently replacing.  */
2733    return 0;
2734
2735  /* Actually replace the MEM.  */
2736  validate_change (args->insn, px, args->replacement, 1);
2737  args->modified++;
2738
2739  return 0;
2740}
2741
2742
2743int
2744df_insn_mem_replace (struct df *df, basic_block bb, rtx insn, rtx mem, rtx reg)
2745{
2746  replace_args args;
2747
2748  args.insn = insn;
2749  args.match = mem;
2750  args.replacement = reg;
2751  args.modified = 0;
2752
2753  /* Search and replace all matching mems within insn.  */
2754  for_each_rtx (&insn, df_rtx_mem_replace, &args);
2755
2756  if (args.modified)
2757    df_insn_modify (df, bb, insn);
2758
2759  /* ???? FIXME.  We may have a new def or one or more new uses of REG
2760     in INSN.  REG should be a new pseudo so it won't affect the
2761     dataflow information that we currently have.  We should add
2762     the new uses and defs to INSN and then recreate the chains
2763     when df_analyze is called.  */
2764  return args.modified;
2765}
2766
2767
2768/* Replace one register with another.  Called through for_each_rtx; PX
2769   points to the rtx being scanned.  DATA is actually a pointer to a
2770   structure of arguments.  */
2771static int
2772df_rtx_reg_replace (rtx *px, void *data)
2773{
2774  rtx x = *px;
2775  replace_args *args = (replace_args *) data;
2776
2777  if (x == NULL_RTX)
2778    return 0;
2779
2780  if (x == args->match)
2781    {
2782      validate_change (args->insn, px, args->replacement, 1);
2783      args->modified++;
2784    }
2785
2786  return 0;
2787}
2788
2789
2790/* Replace the reg within every ref on CHAIN that is within the set
2791   BLOCKS of basic blocks with NEWREG.  Also update the regs within
2792   REG_NOTES.  */
2793void
2794df_refs_reg_replace (struct df *df, bitmap blocks, struct df_link *chain, rtx oldreg, rtx newreg)
2795{
2796  struct df_link *link;
2797  replace_args args;
2798
2799  if (! blocks)
2800    blocks = df->all_blocks;
2801
2802  args.match = oldreg;
2803  args.replacement = newreg;
2804  args.modified = 0;
2805
2806  for (link = chain; link; link = link->next)
2807    {
2808      struct ref *ref = link->ref;
2809      rtx insn = DF_REF_INSN (ref);
2810
2811      if (! INSN_P (insn))
2812	continue;
2813
2814      gcc_assert (bitmap_bit_p (blocks, DF_REF_BBNO (ref)));
2815
2816      df_ref_reg_replace (df, ref, oldreg, newreg);
2817
2818      /* Replace occurrences of the reg within the REG_NOTES.  */
2819      if ((! link->next || DF_REF_INSN (ref)
2820	   != DF_REF_INSN (link->next->ref))
2821	  && REG_NOTES (insn))
2822	{
2823	  args.insn = insn;
2824	  for_each_rtx (&REG_NOTES (insn), df_rtx_reg_replace, &args);
2825	}
2826    }
2827}
2828
2829
2830/* Replace all occurrences of register OLDREG with register NEWREG in
2831   blocks defined by bitmap BLOCKS.  This also replaces occurrences of
2832   OLDREG in the REG_NOTES but only for insns containing OLDREG.  This
2833   routine expects the reg-use and reg-def chains to be valid.  */
2834int
2835df_reg_replace (struct df *df, bitmap blocks, rtx oldreg, rtx newreg)
2836{
2837  unsigned int oldregno = REGNO (oldreg);
2838
2839  df_refs_reg_replace (df, blocks, df->regs[oldregno].defs, oldreg, newreg);
2840  df_refs_reg_replace (df, blocks, df->regs[oldregno].uses, oldreg, newreg);
2841  return 1;
2842}
2843
2844
2845/* Try replacing the reg within REF with NEWREG.  Do not modify
2846   def-use/use-def chains.  */
2847int
2848df_ref_reg_replace (struct df *df, struct ref *ref, rtx oldreg, rtx newreg)
2849{
2850  /* Check that insn was deleted by being converted into a NOTE.  If
2851   so ignore this insn.  */
2852  if (! INSN_P (DF_REF_INSN (ref)))
2853    return 0;
2854
2855  gcc_assert (!oldreg || oldreg == DF_REF_REG (ref));
2856
2857  if (! validate_change (DF_REF_INSN (ref), DF_REF_LOC (ref), newreg, 1))
2858    return 0;
2859
2860  df_insn_modify (df, DF_REF_BB (ref), DF_REF_INSN (ref));
2861  return 1;
2862}
2863
2864
2865struct ref*
2866df_bb_def_use_swap (struct df *df, basic_block bb, rtx def_insn, rtx use_insn, unsigned int regno)
2867{
2868  struct ref *def;
2869  struct ref *use;
2870  int def_uid;
2871  int use_uid;
2872  struct df_link *link;
2873
2874  def = df_bb_insn_regno_first_def_find (df, bb, def_insn, regno);
2875  if (! def)
2876    return 0;
2877
2878  use = df_bb_insn_regno_last_use_find (df, bb, use_insn, regno);
2879  if (! use)
2880    return 0;
2881
2882  /* The USE no longer exists.  */
2883  use_uid = INSN_UID (use_insn);
2884  df_use_unlink (df, use);
2885  df_ref_unlink (&df->insns[use_uid].uses, use);
2886
2887  /* The DEF requires shifting so remove it from DEF_INSN
2888     and add it to USE_INSN by reusing LINK.  */
2889  def_uid = INSN_UID (def_insn);
2890  link = df_ref_unlink (&df->insns[def_uid].defs, def);
2891  link->ref = def;
2892  link->next = df->insns[use_uid].defs;
2893  df->insns[use_uid].defs = link;
2894
2895#if 0
2896  link = df_ref_unlink (&df->regs[regno].defs, def);
2897  link->ref = def;
2898  link->next = df->regs[regno].defs;
2899  df->insns[regno].defs = link;
2900#endif
2901
2902  DF_REF_INSN (def) = use_insn;
2903  return def;
2904}
2905
2906
2907/* Record df between FIRST_INSN and LAST_INSN inclusive.  All new
2908   insns must be processed by this routine.  */
2909static void
2910df_insns_modify (struct df *df, basic_block bb, rtx first_insn, rtx last_insn)
2911{
2912  rtx insn;
2913
2914  for (insn = first_insn; ; insn = NEXT_INSN (insn))
2915    {
2916      unsigned int uid;
2917
2918      /* A non-const call should not have slipped through the net.  If
2919	 it does, we need to create a new basic block.  Ouch.  The
2920	 same applies for a label.  */
2921      gcc_assert ((!CALL_P (insn) || CONST_OR_PURE_CALL_P (insn))
2922		  && !LABEL_P (insn));
2923
2924      uid = INSN_UID (insn);
2925
2926      if (uid >= df->insn_size)
2927	df_insn_table_realloc (df, uid);
2928
2929      df_insn_modify (df, bb, insn);
2930
2931      if (insn == last_insn)
2932	break;
2933    }
2934}
2935
2936
2937/* Emit PATTERN before INSN within BB.  */
2938rtx
2939df_pattern_emit_before (struct df *df, rtx pattern, basic_block bb, rtx insn)
2940{
2941  rtx ret_insn;
2942  rtx prev_insn = PREV_INSN (insn);
2943
2944  /* We should not be inserting before the start of the block.  */
2945  gcc_assert (insn != BB_HEAD (bb));
2946  ret_insn = emit_insn_before (pattern, insn);
2947  if (ret_insn == insn)
2948    return ret_insn;
2949
2950  df_insns_modify (df, bb, NEXT_INSN (prev_insn), ret_insn);
2951  return ret_insn;
2952}
2953
2954
2955/* Emit PATTERN after INSN within BB.  */
2956rtx
2957df_pattern_emit_after (struct df *df, rtx pattern, basic_block bb, rtx insn)
2958{
2959  rtx ret_insn;
2960
2961  ret_insn = emit_insn_after (pattern, insn);
2962  if (ret_insn == insn)
2963    return ret_insn;
2964
2965  df_insns_modify (df, bb, NEXT_INSN (insn), ret_insn);
2966  return ret_insn;
2967}
2968
2969
2970/* Emit jump PATTERN after INSN within BB.  */
2971rtx
2972df_jump_pattern_emit_after (struct df *df, rtx pattern, basic_block bb, rtx insn)
2973{
2974  rtx ret_insn;
2975
2976  ret_insn = emit_jump_insn_after (pattern, insn);
2977  if (ret_insn == insn)
2978    return ret_insn;
2979
2980  df_insns_modify (df, bb, NEXT_INSN (insn), ret_insn);
2981  return ret_insn;
2982}
2983
2984
2985/* Move INSN within BB before BEFORE_INSN within BEFORE_BB.
2986
2987   This function should only be used to move loop invariant insns
2988   out of a loop where it has been proven that the def-use info
2989   will still be valid.  */
2990rtx
2991df_insn_move_before (struct df *df, basic_block bb, rtx insn, basic_block before_bb, rtx before_insn)
2992{
2993  struct df_link *link;
2994  unsigned int uid;
2995
2996  if (! bb)
2997    return df_pattern_emit_before (df, insn, before_bb, before_insn);
2998
2999  uid = INSN_UID (insn);
3000
3001  /* Change bb for all df defined and used by this insn.  */
3002  for (link = df->insns[uid].defs; link; link = link->next)
3003    DF_REF_BB (link->ref) = before_bb;
3004  for (link = df->insns[uid].uses; link; link = link->next)
3005    DF_REF_BB (link->ref) = before_bb;
3006
3007  /* The lifetimes of the registers used in this insn will be reduced
3008     while the lifetimes of the registers defined in this insn
3009     are likely to be increased.  */
3010
3011  /* ???? Perhaps all the insns moved should be stored on a list
3012     which df_analyze removes when it recalculates data flow.  */
3013
3014  return emit_insn_before (insn, before_insn);
3015}
3016
3017/* Functions to query dataflow information.  */
3018
3019
3020int
3021df_insn_regno_def_p (struct df *df, basic_block bb ATTRIBUTE_UNUSED,
3022		     rtx insn, unsigned int regno)
3023{
3024  unsigned int uid;
3025  struct df_link *link;
3026
3027  uid = INSN_UID (insn);
3028
3029  for (link = df->insns[uid].defs; link; link = link->next)
3030    {
3031      struct ref *def = link->ref;
3032
3033      if (DF_REF_REGNO (def) == regno)
3034	return 1;
3035    }
3036
3037  return 0;
3038}
3039
3040/* Finds the reference corresponding to the definition of REG in INSN.
3041   DF is the dataflow object.  */
3042
3043struct ref *
3044df_find_def (struct df *df, rtx insn, rtx reg)
3045{
3046  struct df_link *defs;
3047
3048  for (defs = DF_INSN_DEFS (df, insn); defs; defs = defs->next)
3049    if (rtx_equal_p (DF_REF_REG (defs->ref), reg))
3050      return defs->ref;
3051
3052  return NULL;
3053}
3054
3055/* Return 1 if REG is referenced in INSN, zero otherwise.  */
3056
3057int
3058df_reg_used (struct df *df, rtx insn, rtx reg)
3059{
3060  struct df_link *uses;
3061
3062  for (uses = DF_INSN_USES (df, insn); uses; uses = uses->next)
3063    if (rtx_equal_p (DF_REF_REG (uses->ref), reg))
3064      return 1;
3065
3066  return 0;
3067}
3068
3069static int
3070df_def_dominates_all_uses_p (struct df *df ATTRIBUTE_UNUSED, struct ref *def)
3071{
3072  struct df_link *du_link;
3073
3074  /* Follow def-use chain to find all the uses of this def.  */
3075  for (du_link = DF_REF_CHAIN (def); du_link; du_link = du_link->next)
3076    {
3077      struct ref *use = du_link->ref;
3078      struct df_link *ud_link;
3079
3080      /* Follow use-def chain to check all the defs for this use.  */
3081      for (ud_link = DF_REF_CHAIN (use); ud_link; ud_link = ud_link->next)
3082	if (ud_link->ref != def)
3083	  return 0;
3084    }
3085  return 1;
3086}
3087
3088
3089int
3090df_insn_dominates_all_uses_p (struct df *df, basic_block bb ATTRIBUTE_UNUSED,
3091			      rtx insn)
3092{
3093  unsigned int uid;
3094  struct df_link *link;
3095
3096  uid = INSN_UID (insn);
3097
3098  for (link = df->insns[uid].defs; link; link = link->next)
3099    {
3100      struct ref *def = link->ref;
3101
3102      if (! df_def_dominates_all_uses_p (df, def))
3103	return 0;
3104    }
3105
3106  return 1;
3107}
3108
3109
3110/* Return nonzero if all DF dominates all the uses within the bitmap
3111   BLOCKS.  */
3112static int
3113df_def_dominates_uses_p (struct df *df ATTRIBUTE_UNUSED, struct ref *def,
3114			 bitmap blocks)
3115{
3116  struct df_link *du_link;
3117
3118  /* Follow def-use chain to find all the uses of this def.  */
3119  for (du_link = DF_REF_CHAIN (def); du_link; du_link = du_link->next)
3120    {
3121      struct ref *use = du_link->ref;
3122      struct df_link *ud_link;
3123
3124      /* Only worry about the uses within BLOCKS.  For example,
3125      consider a register defined within a loop that is live at the
3126      loop exits.  */
3127      if (bitmap_bit_p (blocks, DF_REF_BBNO (use)))
3128	{
3129	  /* Follow use-def chain to check all the defs for this use.  */
3130	  for (ud_link = DF_REF_CHAIN (use); ud_link; ud_link = ud_link->next)
3131	    if (ud_link->ref != def)
3132	      return 0;
3133	}
3134    }
3135  return 1;
3136}
3137
3138
3139/* Return nonzero if all the defs of INSN within BB dominates
3140   all the corresponding uses.  */
3141int
3142df_insn_dominates_uses_p (struct df *df, basic_block bb ATTRIBUTE_UNUSED,
3143			  rtx insn, bitmap blocks)
3144{
3145  unsigned int uid;
3146  struct df_link *link;
3147
3148  uid = INSN_UID (insn);
3149
3150  for (link = df->insns[uid].defs; link; link = link->next)
3151    {
3152      struct ref *def = link->ref;
3153
3154      /* Only consider the defs within BLOCKS.  */
3155      if (bitmap_bit_p (blocks, DF_REF_BBNO (def))
3156	  && ! df_def_dominates_uses_p (df, def, blocks))
3157	return 0;
3158    }
3159  return 1;
3160}
3161
3162
3163/* Return the basic block that REG referenced in or NULL if referenced
3164   in multiple basic blocks.  */
3165basic_block
3166df_regno_bb (struct df *df, unsigned int regno)
3167{
3168  struct df_link *defs = df->regs[regno].defs;
3169  struct df_link *uses = df->regs[regno].uses;
3170  struct ref *def = defs ? defs->ref : 0;
3171  struct ref *use = uses ? uses->ref : 0;
3172  basic_block bb_def = def ? DF_REF_BB (def) : 0;
3173  basic_block bb_use = use ? DF_REF_BB (use) : 0;
3174
3175  /* Compare blocks of first def and last use.  ???? FIXME.  What if
3176     the reg-def and reg-use lists are not correctly ordered.  */
3177  return bb_def == bb_use ? bb_def : 0;
3178}
3179
3180
3181/* Return nonzero if REG used in multiple basic blocks.  */
3182int
3183df_reg_global_p (struct df *df, rtx reg)
3184{
3185  return df_regno_bb (df, REGNO (reg)) != 0;
3186}
3187
3188
3189/* Return total lifetime (in insns) of REG.  */
3190int
3191df_reg_lifetime (struct df *df, rtx reg)
3192{
3193  return df->regs[REGNO (reg)].lifetime;
3194}
3195
3196
3197/* Return nonzero if REG live at start of BB.  */
3198int
3199df_bb_reg_live_start_p (struct df *df, basic_block bb, rtx reg)
3200{
3201  struct bb_info *bb_info = DF_BB_INFO (df, bb);
3202
3203  gcc_assert (bb_info->lr_in);
3204
3205  return bitmap_bit_p (bb_info->lr_in, REGNO (reg));
3206}
3207
3208
3209/* Return nonzero if REG live at end of BB.  */
3210int
3211df_bb_reg_live_end_p (struct df *df, basic_block bb, rtx reg)
3212{
3213  struct bb_info *bb_info = DF_BB_INFO (df, bb);
3214
3215  gcc_assert (bb_info->lr_in);
3216
3217  return bitmap_bit_p (bb_info->lr_out, REGNO (reg));
3218}
3219
3220
3221/* Return -1 if life of REG1 before life of REG2, 1 if life of REG1
3222   after life of REG2, or 0, if the lives overlap.  */
3223int
3224df_bb_regs_lives_compare (struct df *df, basic_block bb, rtx reg1, rtx reg2)
3225{
3226  unsigned int regno1 = REGNO (reg1);
3227  unsigned int regno2 = REGNO (reg2);
3228  struct ref *def1;
3229  struct ref *use1;
3230  struct ref *def2;
3231  struct ref *use2;
3232
3233
3234  /* The regs must be local to BB.  */
3235  gcc_assert (df_regno_bb (df, regno1) == bb
3236	      && df_regno_bb (df, regno2) == bb);
3237
3238  def2 = df_bb_regno_first_def_find (df, bb, regno2);
3239  use1 = df_bb_regno_last_use_find (df, bb, regno1);
3240
3241  if (DF_INSN_LUID (df, DF_REF_INSN (def2))
3242      > DF_INSN_LUID (df, DF_REF_INSN (use1)))
3243    return -1;
3244
3245  def1 = df_bb_regno_first_def_find (df, bb, regno1);
3246  use2 = df_bb_regno_last_use_find (df, bb, regno2);
3247
3248  if (DF_INSN_LUID (df, DF_REF_INSN (def1))
3249      > DF_INSN_LUID (df, DF_REF_INSN (use2)))
3250    return 1;
3251
3252  return 0;
3253}
3254
3255
3256/* Return true if the definition DEF, which is in the same basic
3257   block as USE, is available at USE.  So DEF may as well be
3258   dead, in which case using it will extend its live range.  */
3259bool
3260df_local_def_available_p (struct df *df, struct ref *def, struct ref *use)
3261{
3262  struct df_link *link;
3263  int def_luid = DF_INSN_LUID (df, DF_REF_INSN (def));
3264  int in_bb = 0;
3265  unsigned int regno = REGNO (def->reg);
3266  basic_block bb;
3267
3268  /* The regs must be local to BB.  */
3269  gcc_assert (DF_REF_BB (def) == DF_REF_BB (use));
3270  bb = DF_REF_BB (def);
3271
3272  /* This assumes that the reg-def list is ordered such that for any
3273     BB, the first def is found first.  However, since the BBs are not
3274     ordered, the first def in the chain is not necessarily the first
3275     def in the function.  */
3276  for (link = df->regs[regno].defs; link; link = link->next)
3277    {
3278      struct ref *this_def = link->ref;
3279      if (DF_REF_BB (this_def) == bb)
3280	{
3281	  int this_luid = DF_INSN_LUID (df, DF_REF_INSN (this_def));
3282	  /* Do nothing with defs coming before DEF.  */
3283	  if (this_luid > def_luid)
3284	    return this_luid > DF_INSN_LUID (df, DF_REF_INSN (use));
3285
3286	  in_bb = 1;
3287        }
3288      else if (in_bb)
3289	/* DEF was the last in its basic block.  */
3290        return 1;
3291    }
3292
3293  /* DEF was the last in the function.  */
3294  return 1;
3295}
3296
3297
3298/* Return last use of REGNO within BB.  */
3299struct ref *
3300df_bb_regno_last_use_find (struct df *df, basic_block bb, unsigned int regno)
3301{
3302  struct df_link *link;
3303
3304  /* This assumes that the reg-use list is ordered such that for any
3305     BB, the last use is found first.  However, since the BBs are not
3306     ordered, the first use in the chain is not necessarily the last
3307     use in the function.  */
3308  for (link = df->regs[regno].uses; link; link = link->next)
3309    {
3310      struct ref *use = link->ref;
3311
3312      if (DF_REF_BB (use) == bb)
3313	return use;
3314    }
3315  return 0;
3316}
3317
3318
3319/* Return first def of REGNO within BB.  */
3320struct ref *
3321df_bb_regno_first_def_find (struct df *df, basic_block bb, unsigned int regno)
3322{
3323  struct df_link *link;
3324
3325  /* This assumes that the reg-def list is ordered such that for any
3326     BB, the first def is found first.  However, since the BBs are not
3327     ordered, the first def in the chain is not necessarily the first
3328     def in the function.  */
3329  for (link = df->regs[regno].defs; link; link = link->next)
3330    {
3331      struct ref *def = link->ref;
3332
3333      if (DF_REF_BB (def) == bb)
3334	return def;
3335    }
3336  return 0;
3337}
3338
3339/* Return last def of REGNO within BB.  */
3340struct ref *
3341df_bb_regno_last_def_find (struct df *df, basic_block bb, unsigned int regno)
3342{
3343  struct df_link *link;
3344  struct ref *last_def = NULL;
3345  int in_bb = 0;
3346
3347  /* This assumes that the reg-def list is ordered such that for any
3348     BB, the first def is found first.  However, since the BBs are not
3349     ordered, the first def in the chain is not necessarily the first
3350     def in the function.  */
3351  for (link = df->regs[regno].defs; link; link = link->next)
3352    {
3353      struct ref *def = link->ref;
3354      /* The first time in the desired block.  */
3355      if (DF_REF_BB (def) == bb)
3356	  in_bb = 1;
3357      /* The last def in the desired block.  */
3358      else if (in_bb)
3359        return last_def;
3360      last_def = def;
3361    }
3362  return last_def;
3363}
3364
3365/* Return last use of REGNO inside INSN within BB.  */
3366static struct ref *
3367df_bb_insn_regno_last_use_find (struct df *df,
3368				basic_block bb ATTRIBUTE_UNUSED, rtx insn,
3369				unsigned int regno)
3370{
3371  unsigned int uid;
3372  struct df_link *link;
3373
3374  uid = INSN_UID (insn);
3375
3376  for (link = df->insns[uid].uses; link; link = link->next)
3377    {
3378      struct ref *use = link->ref;
3379
3380      if (DF_REF_REGNO (use) == regno)
3381	return use;
3382    }
3383
3384  return 0;
3385}
3386
3387
3388/* Return first def of REGNO inside INSN within BB.  */
3389static struct ref *
3390df_bb_insn_regno_first_def_find (struct df *df,
3391				 basic_block bb ATTRIBUTE_UNUSED, rtx insn,
3392				 unsigned int regno)
3393{
3394  unsigned int uid;
3395  struct df_link *link;
3396
3397  uid = INSN_UID (insn);
3398
3399  for (link = df->insns[uid].defs; link; link = link->next)
3400    {
3401      struct ref *def = link->ref;
3402
3403      if (DF_REF_REGNO (def) == regno)
3404	return def;
3405    }
3406
3407  return 0;
3408}
3409
3410
3411/* Return insn using REG if the BB contains only a single
3412   use and def of REG.  */
3413rtx
3414df_bb_single_def_use_insn_find (struct df *df, basic_block bb, rtx insn, rtx reg)
3415{
3416  struct ref *def;
3417  struct ref *use;
3418  struct df_link *du_link;
3419
3420  def = df_bb_insn_regno_first_def_find (df, bb, insn, REGNO (reg));
3421
3422  gcc_assert (def);
3423
3424  du_link = DF_REF_CHAIN (def);
3425
3426  if (! du_link)
3427    return NULL_RTX;
3428
3429  use = du_link->ref;
3430
3431  /* Check if def is dead.  */
3432  if (! use)
3433    return NULL_RTX;
3434
3435  /* Check for multiple uses.  */
3436  if (du_link->next)
3437    return NULL_RTX;
3438
3439  return DF_REF_INSN (use);
3440}
3441
3442/* Functions for debugging/dumping dataflow information.  */
3443
3444
3445/* Dump a def-use or use-def chain for REF to FILE.  */
3446static void
3447df_chain_dump (struct df_link *link, FILE *file)
3448{
3449  fprintf (file, "{ ");
3450  for (; link; link = link->next)
3451    {
3452      fprintf (file, "%c%d ",
3453	       DF_REF_REG_DEF_P (link->ref) ? 'd' : 'u',
3454	       DF_REF_ID (link->ref));
3455    }
3456  fprintf (file, "}");
3457}
3458
3459
3460/* Dump a chain of refs with the associated regno.  */
3461static void
3462df_chain_dump_regno (struct df_link *link, FILE *file)
3463{
3464  fprintf (file, "{ ");
3465  for (; link; link = link->next)
3466    {
3467      fprintf (file, "%c%d(%d) ",
3468	       DF_REF_REG_DEF_P (link->ref) ? 'd' : 'u',
3469	       DF_REF_ID (link->ref),
3470	       DF_REF_REGNO (link->ref));
3471    }
3472  fprintf (file, "}");
3473}
3474
3475
3476/* Dump dataflow info.  */
3477void
3478df_dump (struct df *df, int flags, FILE *file)
3479{
3480  unsigned int j;
3481  basic_block bb;
3482
3483  if (! df || ! file)
3484    return;
3485
3486  fprintf (file, "\nDataflow summary:\n");
3487  fprintf (file, "n_regs = %d, n_defs = %d, n_uses = %d, n_bbs = %d\n",
3488	   df->n_regs, df->n_defs, df->n_uses, df->n_bbs);
3489
3490  if (flags & DF_RD)
3491    {
3492      basic_block bb;
3493
3494      fprintf (file, "Reaching defs:\n");
3495      FOR_EACH_BB (bb)
3496	{
3497	  struct bb_info *bb_info = DF_BB_INFO (df, bb);
3498
3499	  if (! bb_info->rd_in)
3500	    continue;
3501
3502	  fprintf (file, "bb %d in  \t", bb->index);
3503	  dump_bitmap (file, bb_info->rd_in);
3504	  fprintf (file, "bb %d gen \t", bb->index);
3505	  dump_bitmap (file, bb_info->rd_gen);
3506	  fprintf (file, "bb %d kill\t", bb->index);
3507	  dump_bitmap (file, bb_info->rd_kill);
3508	  fprintf (file, "bb %d out \t", bb->index);
3509	  dump_bitmap (file, bb_info->rd_out);
3510	}
3511    }
3512
3513  if (flags & DF_UD_CHAIN)
3514    {
3515      fprintf (file, "Use-def chains:\n");
3516      for (j = 0; j < df->n_defs; j++)
3517	{
3518	  if (df->defs[j])
3519	    {
3520	      fprintf (file, "d%d bb %d luid %d insn %d reg %d ",
3521		       j, DF_REF_BBNO (df->defs[j]),
3522		       DF_INSN_LUID (df, DF_REF_INSN (df->defs[j])),
3523		       DF_REF_INSN_UID (df->defs[j]),
3524		       DF_REF_REGNO (df->defs[j]));
3525	      if (df->defs[j]->flags & DF_REF_READ_WRITE)
3526		fprintf (file, "read/write ");
3527	      df_chain_dump (DF_REF_CHAIN (df->defs[j]), file);
3528	      fprintf (file, "\n");
3529	    }
3530	}
3531    }
3532
3533  if (flags & DF_RU)
3534    {
3535      fprintf (file, "Reaching uses:\n");
3536      FOR_EACH_BB (bb)
3537	{
3538	  struct bb_info *bb_info = DF_BB_INFO (df, bb);
3539
3540	  if (! bb_info->ru_in)
3541	    continue;
3542
3543	  fprintf (file, "bb %d in  \t", bb->index);
3544	  dump_bitmap (file, bb_info->ru_in);
3545	  fprintf (file, "bb %d gen \t", bb->index);
3546	  dump_bitmap (file, bb_info->ru_gen);
3547	  fprintf (file, "bb %d kill\t", bb->index);
3548	  dump_bitmap (file, bb_info->ru_kill);
3549	  fprintf (file, "bb %d out \t", bb->index);
3550	  dump_bitmap (file, bb_info->ru_out);
3551	}
3552    }
3553
3554  if (flags & DF_DU_CHAIN)
3555    {
3556      fprintf (file, "Def-use chains:\n");
3557      for (j = 0; j < df->n_uses; j++)
3558	{
3559	  if (df->uses[j])
3560	    {
3561	      fprintf (file, "u%d bb %d luid %d insn %d reg %d ",
3562		       j, DF_REF_BBNO (df->uses[j]),
3563		       DF_INSN_LUID (df, DF_REF_INSN (df->uses[j])),
3564		       DF_REF_INSN_UID (df->uses[j]),
3565		       DF_REF_REGNO (df->uses[j]));
3566	      if (df->uses[j]->flags & DF_REF_READ_WRITE)
3567		fprintf (file, "read/write ");
3568	      df_chain_dump (DF_REF_CHAIN (df->uses[j]), file);
3569	      fprintf (file, "\n");
3570	    }
3571	}
3572    }
3573
3574  if (flags & DF_LR)
3575    {
3576      fprintf (file, "Live regs:\n");
3577      FOR_EACH_BB (bb)
3578	{
3579	  struct bb_info *bb_info = DF_BB_INFO (df, bb);
3580
3581	  if (! bb_info->lr_in)
3582	    continue;
3583
3584	  fprintf (file, "bb %d in  \t", bb->index);
3585	  dump_bitmap (file, bb_info->lr_in);
3586	  fprintf (file, "bb %d use \t", bb->index);
3587	  dump_bitmap (file, bb_info->lr_use);
3588	  fprintf (file, "bb %d def \t", bb->index);
3589	  dump_bitmap (file, bb_info->lr_def);
3590	  fprintf (file, "bb %d out \t", bb->index);
3591	  dump_bitmap (file, bb_info->lr_out);
3592	}
3593    }
3594
3595  if (flags & (DF_REG_INFO | DF_RD_CHAIN | DF_RU_CHAIN))
3596    {
3597      struct reg_info *reg_info = df->regs;
3598
3599      fprintf (file, "Register info:\n");
3600      for (j = 0; j < df->n_regs; j++)
3601	{
3602	  if (((flags & DF_REG_INFO)
3603	       && (reg_info[j].n_uses || reg_info[j].n_defs))
3604	      || ((flags & DF_RD_CHAIN) && reg_info[j].defs)
3605	      || ((flags & DF_RU_CHAIN) && reg_info[j].uses))
3606	    {
3607	      fprintf (file, "reg %d", j);
3608	      if ((flags & DF_RD_CHAIN) && (flags & DF_RU_CHAIN))
3609		{
3610		  basic_block bb = df_regno_bb (df, j);
3611
3612		  if (bb)
3613		    fprintf (file, " bb %d", bb->index);
3614		  else
3615		    fprintf (file, " bb ?");
3616		}
3617	      if (flags & DF_REG_INFO)
3618		{
3619		  fprintf (file, " life %d", reg_info[j].lifetime);
3620		}
3621
3622	      if ((flags & DF_REG_INFO) || (flags & DF_RD_CHAIN))
3623		{
3624		  fprintf (file, " defs ");
3625		  if (flags & DF_REG_INFO)
3626		    fprintf (file, "%d ", reg_info[j].n_defs);
3627		  if (flags & DF_RD_CHAIN)
3628		    df_chain_dump (reg_info[j].defs, file);
3629		}
3630
3631	      if ((flags & DF_REG_INFO) || (flags & DF_RU_CHAIN))
3632		{
3633		  fprintf (file, " uses ");
3634		  if (flags & DF_REG_INFO)
3635		    fprintf (file, "%d ", reg_info[j].n_uses);
3636		  if (flags & DF_RU_CHAIN)
3637		    df_chain_dump (reg_info[j].uses, file);
3638		}
3639
3640	      fprintf (file, "\n");
3641	    }
3642	}
3643    }
3644  fprintf (file, "\n");
3645}
3646
3647
3648void
3649df_insn_debug (struct df *df, rtx insn, FILE *file)
3650{
3651  unsigned int uid;
3652  int bbi;
3653
3654  uid = INSN_UID (insn);
3655  if (uid >= df->insn_size)
3656    return;
3657
3658  if (df->insns[uid].defs)
3659    bbi = DF_REF_BBNO (df->insns[uid].defs->ref);
3660  else if (df->insns[uid].uses)
3661    bbi = DF_REF_BBNO (df->insns[uid].uses->ref);
3662  else
3663    bbi = -1;
3664
3665  fprintf (file, "insn %d bb %d luid %d defs ",
3666	   uid, bbi, DF_INSN_LUID (df, insn));
3667  df_chain_dump (df->insns[uid].defs, file);
3668  fprintf (file, " uses ");
3669  df_chain_dump (df->insns[uid].uses, file);
3670  fprintf (file, "\n");
3671}
3672
3673
3674void
3675df_insn_debug_regno (struct df *df, rtx insn, FILE *file)
3676{
3677  unsigned int uid;
3678  int bbi;
3679
3680  uid = INSN_UID (insn);
3681  if (uid >= df->insn_size)
3682    return;
3683
3684  if (df->insns[uid].defs)
3685    bbi = DF_REF_BBNO (df->insns[uid].defs->ref);
3686  else if (df->insns[uid].uses)
3687    bbi = DF_REF_BBNO (df->insns[uid].uses->ref);
3688  else
3689    bbi = -1;
3690
3691  fprintf (file, "insn %d bb %d luid %d defs ",
3692	   uid, bbi, DF_INSN_LUID (df, insn));
3693  df_chain_dump_regno (df->insns[uid].defs, file);
3694  fprintf (file, " uses ");
3695  df_chain_dump_regno (df->insns[uid].uses, file);
3696  fprintf (file, "\n");
3697}
3698
3699
3700static void
3701df_regno_debug (struct df *df, unsigned int regno, FILE *file)
3702{
3703  if (regno >= df->reg_size)
3704    return;
3705
3706  fprintf (file, "reg %d life %d defs ",
3707	   regno, df->regs[regno].lifetime);
3708  df_chain_dump (df->regs[regno].defs, file);
3709  fprintf (file, " uses ");
3710  df_chain_dump (df->regs[regno].uses, file);
3711  fprintf (file, "\n");
3712}
3713
3714
3715static void
3716df_ref_debug (struct df *df, struct ref *ref, FILE *file)
3717{
3718  fprintf (file, "%c%d ",
3719	   DF_REF_REG_DEF_P (ref) ? 'd' : 'u',
3720	   DF_REF_ID (ref));
3721  fprintf (file, "reg %d bb %d luid %d insn %d chain ",
3722	   DF_REF_REGNO (ref),
3723	   DF_REF_BBNO (ref),
3724	   DF_INSN_LUID (df, DF_REF_INSN (ref)),
3725	   INSN_UID (DF_REF_INSN (ref)));
3726  df_chain_dump (DF_REF_CHAIN (ref), file);
3727  fprintf (file, "\n");
3728}
3729
3730/* Functions for debugging from GDB.  */
3731
3732void
3733debug_df_insn (rtx insn)
3734{
3735  df_insn_debug (ddf, insn, stderr);
3736  debug_rtx (insn);
3737}
3738
3739
3740void
3741debug_df_reg (rtx reg)
3742{
3743  df_regno_debug (ddf, REGNO (reg), stderr);
3744}
3745
3746
3747void
3748debug_df_regno (unsigned int regno)
3749{
3750  df_regno_debug (ddf, regno, stderr);
3751}
3752
3753
3754void
3755debug_df_ref (struct ref *ref)
3756{
3757  df_ref_debug (ddf, ref, stderr);
3758}
3759
3760
3761void
3762debug_df_defno (unsigned int defno)
3763{
3764  df_ref_debug (ddf, ddf->defs[defno], stderr);
3765}
3766
3767
3768void
3769debug_df_useno (unsigned int defno)
3770{
3771  df_ref_debug (ddf, ddf->uses[defno], stderr);
3772}
3773
3774
3775void
3776debug_df_chain (struct df_link *link)
3777{
3778  df_chain_dump (link, stderr);
3779  fputc ('\n', stderr);
3780}
3781
3782
3783/* Perform the set operation OP1 OP OP2, using set representation REPR, and
3784   storing the result in OP1.  */
3785
3786static void
3787dataflow_set_a_op_b (enum set_representation repr,
3788		     enum df_confluence_op op,
3789		     void *op1, void *op2)
3790{
3791  switch (repr)
3792    {
3793    case SR_SBITMAP:
3794      switch (op)
3795	{
3796	case DF_UNION:
3797	  sbitmap_a_or_b (op1, op1, op2);
3798	  break;
3799
3800	case DF_INTERSECTION:
3801	  sbitmap_a_and_b (op1, op1, op2);
3802	  break;
3803
3804    	default:
3805	  gcc_unreachable ();
3806	}
3807      break;
3808
3809    case SR_BITMAP:
3810      switch (op)
3811	{
3812	case DF_UNION:
3813	  bitmap_ior_into (op1, op2);
3814	  break;
3815
3816	case DF_INTERSECTION:
3817	  bitmap_and_into (op1, op2);
3818	  break;
3819
3820    	default:
3821	  gcc_unreachable ();
3822	}
3823      break;
3824
3825    default:
3826      gcc_unreachable ();
3827    }
3828}
3829
3830static void
3831dataflow_set_copy (enum set_representation repr, void *dest, void *src)
3832{
3833  switch (repr)
3834    {
3835    case SR_SBITMAP:
3836      sbitmap_copy (dest, src);
3837      break;
3838
3839    case SR_BITMAP:
3840      bitmap_copy (dest, src);
3841      break;
3842
3843    default:
3844      gcc_unreachable ();
3845    }
3846}
3847
3848/* Hybrid search algorithm from "Implementation Techniques for
3849   Efficient Data-Flow Analysis of Large Programs".  */
3850
3851static void
3852hybrid_search (basic_block bb, struct dataflow *dataflow,
3853	       sbitmap visited, sbitmap pending, sbitmap considered)
3854{
3855  int changed;
3856  int i = bb->index;
3857  edge e;
3858  edge_iterator ei;
3859
3860  SET_BIT (visited, bb->index);
3861  gcc_assert (TEST_BIT (pending, bb->index));
3862  RESET_BIT (pending, i);
3863
3864#define HS(E_ANTI, E_ANTI_BB, E_ANTI_START_BB, IN_SET,			\
3865	   E, E_BB, E_START_BB, OUT_SET)				\
3866  do									\
3867    {									\
3868      /*  Calculate <conf_op> of predecessor_outs.  */			\
3869      bitmap_zero (IN_SET[i]);						\
3870      FOR_EACH_EDGE (e, ei, bb->E_ANTI)					\
3871	{								\
3872	  if (e->E_ANTI_BB == E_ANTI_START_BB)				\
3873	    continue;							\
3874	  if (!TEST_BIT (considered, e->E_ANTI_BB->index))		\
3875	    continue;							\
3876									\
3877	  dataflow_set_a_op_b (dataflow->repr, dataflow->conf_op,	\
3878			       IN_SET[i],      			        \
3879			       OUT_SET[e->E_ANTI_BB->index]);		\
3880	}								\
3881									\
3882      (*dataflow->transfun)(i, &changed,				\
3883			    dataflow->in[i], dataflow->out[i],		\
3884			    dataflow->gen[i], dataflow->kill[i],	\
3885			    dataflow->data);				\
3886									\
3887      if (!changed)							\
3888	break;								\
3889									\
3890      FOR_EACH_EDGE (e, ei, bb->E)						\
3891	{								\
3892	  if (e->E_BB == E_START_BB || e->E_BB->index == i)		\
3893	    continue;							\
3894									\
3895	  if (!TEST_BIT (considered, e->E_BB->index))			\
3896	    continue;							\
3897									\
3898	  SET_BIT (pending, e->E_BB->index);				\
3899      	}								\
3900									\
3901      FOR_EACH_EDGE (e, ei, bb->E)						\
3902	{								\
3903	  if (e->E_BB == E_START_BB || e->E_BB->index == i)		\
3904	    continue;							\
3905									\
3906	  if (!TEST_BIT (considered, e->E_BB->index))			\
3907	    continue;							\
3908									\
3909	  if (!TEST_BIT (visited, e->E_BB->index))			\
3910	    hybrid_search (e->E_BB, dataflow, visited, pending, considered); \
3911	}								\
3912    } while (0)
3913
3914  if (dataflow->dir == DF_FORWARD)
3915    HS (preds, src, ENTRY_BLOCK_PTR, dataflow->in,
3916	succs, dest, EXIT_BLOCK_PTR, dataflow->out);
3917  else
3918    HS (succs, dest, EXIT_BLOCK_PTR, dataflow->out,
3919	preds, src, ENTRY_BLOCK_PTR, dataflow->in);
3920}
3921
3922/* This function will perform iterative bitvector dataflow described by
3923   DATAFLOW, producing the in and out sets.  Only the part of the cfg
3924   induced by blocks in DATAFLOW->order is taken into account.
3925
3926   For forward problems, you probably want to pass in a mapping of
3927   block number to rc_order (like df->inverse_rc_map).  */
3928
3929void
3930iterative_dataflow (struct dataflow *dataflow)
3931{
3932  unsigned i, idx;
3933  sbitmap visited, pending, considered;
3934
3935  pending = sbitmap_alloc (last_basic_block);
3936  visited = sbitmap_alloc (last_basic_block);
3937  considered = sbitmap_alloc (last_basic_block);
3938  sbitmap_zero (pending);
3939  sbitmap_zero (visited);
3940  sbitmap_zero (considered);
3941
3942  for (i = 0; i < dataflow->n_blocks; i++)
3943    {
3944      idx = dataflow->order[i];
3945      SET_BIT (pending, idx);
3946      SET_BIT (considered, idx);
3947      if (dataflow->dir == DF_FORWARD)
3948	dataflow_set_copy (dataflow->repr,
3949			   dataflow->out[idx], dataflow->gen[idx]);
3950      else
3951	dataflow_set_copy (dataflow->repr,
3952			   dataflow->in[idx], dataflow->gen[idx]);
3953    };
3954
3955  while (1)
3956    {
3957      for (i = 0; i < dataflow->n_blocks; i++)
3958	{
3959	  idx = dataflow->order[i];
3960
3961	  if (TEST_BIT (pending, idx) && !TEST_BIT (visited, idx))
3962	    hybrid_search (BASIC_BLOCK (idx), dataflow,
3963			   visited, pending, considered);
3964	}
3965
3966      if (sbitmap_first_set_bit (pending) == -1)
3967	break;
3968
3969      sbitmap_zero (visited);
3970    }
3971
3972  sbitmap_free (pending);
3973  sbitmap_free (visited);
3974  sbitmap_free (considered);
3975}
3976