1/* Emit RTL for the GCC expander.
2 Copyright (C) 1987, 1988, 1992, 1993, 1994, 1995, 1996, 1997, 1998,
3 1999, 2000, 2001, 2002, 2003, 2004, 2005, 2006
4 Free Software Foundation, Inc.
5
6This file is part of GCC.
7
8GCC is free software; you can redistribute it and/or modify it under
9the terms of the GNU General Public License as published by the Free
10Software Foundation; either version 2, or (at your option) any later
11version.
12
13GCC is distributed in the hope that it will be useful, but WITHOUT ANY
14WARRANTY; without even the implied warranty of MERCHANTABILITY or
15FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
16for more details.
17
18You should have received a copy of the GNU General Public License
19along with GCC; see the file COPYING. If not, write to the Free
20Software Foundation, 51 Franklin Street, Fifth Floor, Boston, MA
2102110-1301, USA. */
22
23
24/* Middle-to-low level generation of rtx code and insns.
25
26 This file contains support functions for creating rtl expressions
27 and manipulating them in the doubly-linked chain of insns.
28
29 The patterns of the insns are created by machine-dependent
30 routines in insn-emit.c, which is generated automatically from
31 the machine description. These routines make the individual rtx's
32 of the pattern with `gen_rtx_fmt_ee' and others in genrtl.[ch],
33 which are automatically generated from rtl.def; what is machine
34 dependent is the kind of rtx's they make and what arguments they
35 use. */
36
37#include "config.h"
38#include "system.h"
39#include "coretypes.h"
40#include "tm.h"
41#include "toplev.h"
42#include "rtl.h"
43#include "tree.h"

--- 7 unchanged lines hidden ---

51#include "insn-config.h"
52#include "recog.h"
53#include "real.h"
54#include "bitmap.h"
55#include "basic-block.h"
56#include "ggc.h"
57#include "debug.h"
58#include "langhooks.h"
59#include "tree-pass.h"
60
61/* Commonly used modes. */
62
63enum machine_mode byte_mode; /* Mode whose width is BITS_PER_UNIT. */
64enum machine_mode word_mode; /* Mode whose width is BITS_PER_WORD. */
65enum machine_mode double_mode; /* Mode whose width is DOUBLE_TYPE_SIZE. */
66enum machine_mode ptr_mode; /* Mode whose width is POINTER_SIZE. */
67
68
69/* This is *not* reset after each function. It gives each CODE_LABEL
70 in the entire compilation a unique label number. */
71
72static GTY(()) int label_num = 1;
73
74/* Nonzero means do not generate NOTEs for source line numbers. */
75
76static int no_line_numbers;
77
78/* Commonly used rtx's, so that we only need space for one copy.
79 These are initialized once for the entire compilation.
80 All of these are unique; no other rtx-object will be equal to any
81 of these. */

--- 78 unchanged lines hidden ---

160 htab_t const_double_htab;
161
162#define first_insn (cfun->emit->x_first_insn)
163#define last_insn (cfun->emit->x_last_insn)
164#define cur_insn_uid (cfun->emit->x_cur_insn_uid)
165#define last_location (cfun->emit->x_last_location)
166#define first_label_num (cfun->emit->x_first_label_num)
167
168static rtx make_call_insn_raw (rtx);
169static rtx find_line_note (rtx);
170static rtx change_address_1 (rtx, enum machine_mode, rtx, int);
171static void unshare_all_decls (tree);
172static void reset_used_decls (tree);
173static void mark_label_nuses (rtx);
174static hashval_t const_int_htab_hash (const void *);
175static int const_int_htab_eq (const void *, const void *);
176static hashval_t const_double_htab_hash (const void *);
177static int const_double_htab_eq (const void *, const void *);
178static rtx lookup_const_double (rtx);
179static hashval_t mem_attrs_htab_hash (const void *);
180static int mem_attrs_htab_eq (const void *, const void *);
181static mem_attrs *get_mem_attrs (HOST_WIDE_INT, tree, rtx, rtx, unsigned int,
182 enum machine_mode);
183static hashval_t reg_attrs_htab_hash (const void *);
184static int reg_attrs_htab_eq (const void *, const void *);
185static reg_attrs *get_reg_attrs (tree, int);
186static tree component_ref_for_mem_expr (tree);
187static rtx gen_const_vector (enum machine_mode, int);
188static void copy_rtx_if_shared_1 (rtx *orig);
189
190/* Probability of the conditional branch currently processed by try_split.
191 Set to -1 otherwise. */
192int split_branch_probability = -1;
193
194/* Returns a hash code for X (which is really a CONST_INT). */
195

--- 53 unchanged lines hidden ---

249static hashval_t
250mem_attrs_htab_hash (const void *x)
251{
252 mem_attrs *p = (mem_attrs *) x;
253
254 return (p->alias ^ (p->align * 1000)
255 ^ ((p->offset ? INTVAL (p->offset) : 0) * 50000)
256 ^ ((p->size ? INTVAL (p->size) : 0) * 2500000)
257 ^ (size_t) iterative_hash_expr (p->expr, 0));
258}
259
260/* Returns nonzero if the value represented by X (which is really a
261 mem_attrs *) is the same as that given by Y (which is also really a
262 mem_attrs *). */
263
264static int
265mem_attrs_htab_eq (const void *x, const void *y)
266{
267 mem_attrs *p = (mem_attrs *) x;
268 mem_attrs *q = (mem_attrs *) y;
269
270 return (p->alias == q->alias && p->offset == q->offset
271 && p->size == q->size && p->align == q->align
272 && (p->expr == q->expr
273 || (p->expr != NULL_TREE && q->expr != NULL_TREE
274 && operand_equal_p (p->expr, q->expr, 0))));
275}
276
277/* Allocate a new mem_attrs structure and insert it into the hash table if
278 one identical to it is not already in the table. We are doing this for
279 MEM of mode MODE. */
280
281static mem_attrs *
282get_mem_attrs (HOST_WIDE_INT alias, tree expr, rtx offset, rtx size,

--- 141 unchanged lines hidden ---

424/* Return a CONST_DOUBLE rtx for a floating-point value specified by
425 VALUE in mode MODE. */
426rtx
427const_double_from_real_value (REAL_VALUE_TYPE value, enum machine_mode mode)
428{
429 rtx real = rtx_alloc (CONST_DOUBLE);
430 PUT_MODE (real, mode);
431
432 real->u.rv = value;
433
434 return lookup_const_double (real);
435}
436
437/* Return a CONST_DOUBLE or CONST_INT for a value specified as a pair
438 of ints: I0 is the low-order word and I1 is the high-order word.
439 Do not use this routine for non-integer modes; convert to
440 REAL_VALUE_TYPE and use CONST_DOUBLE_FROM_REAL_VALUE. */
441
442rtx
443immed_double_const (HOST_WIDE_INT i0, HOST_WIDE_INT i1, enum machine_mode mode)
444{
445 rtx value;
446 unsigned int i;
447
448 /* There are the following cases (note that there are no modes with
449 HOST_BITS_PER_WIDE_INT < GET_MODE_BITSIZE (mode) < 2 * HOST_BITS_PER_WIDE_INT):
450
451 1) If GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_WIDE_INT, then we use
452 gen_int_mode.
453 2) GET_MODE_BITSIZE (mode) == 2 * HOST_BITS_PER_WIDE_INT, but the value of
454 the integer fits into HOST_WIDE_INT anyway (i.e., i1 consists only
455 of copies of the sign bit, and the signs of i0 and i1 are the same), then
456 we return a CONST_INT for i0.
457 3) Otherwise, we create a CONST_DOUBLE for i0 and i1. */
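
   /* For instance, assuming a 64-bit HOST_WIDE_INT (illustrative only):

	immed_double_const (5, 0, SImode)    => (const_int 5)    [case 1]
	immed_double_const (-1, -1, TImode)  => (const_int -1)   [case 2]
	immed_double_const (0, 1, TImode)    => a CONST_DOUBLE   [case 3]  */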
458 if (mode != VOIDmode)
459 {
460 gcc_assert (GET_MODE_CLASS (mode) == MODE_INT
461 || GET_MODE_CLASS (mode) == MODE_PARTIAL_INT
462 /* We can get a 0 for an error mark. */
463 || GET_MODE_CLASS (mode) == MODE_VECTOR_INT
464 || GET_MODE_CLASS (mode) == MODE_VECTOR_FLOAT);
465
466 if (GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_WIDE_INT)
467 return gen_int_mode (i0, mode);
468
469 gcc_assert (GET_MODE_BITSIZE (mode) == 2 * HOST_BITS_PER_WIDE_INT);
470 }
471
472 /* If this integer fits in one word, return a CONST_INT. */
473 if ((i1 == 0 && i0 >= 0) || (i1 == ~0 && i0 < 0))
474 return GEN_INT (i0);
475
476 /* We use VOIDmode for integers. */
477 value = rtx_alloc (CONST_DOUBLE);

--- 80 unchanged lines hidden ---

558
559 /* This field is not cleared by the mere allocation of the rtx, so
560 we clear it here. */
561 MEM_ATTRS (rt) = 0;
562
563 return rt;
564}
565
566/* Generate a MEM referring to non-trapping constant memory. */
567
568rtx
569gen_const_mem (enum machine_mode mode, rtx addr)
570{
571 rtx mem = gen_rtx_MEM (mode, addr);
572 MEM_READONLY_P (mem) = 1;
573 MEM_NOTRAP_P (mem) = 1;
574 return mem;
575}
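
/* For example, a load from the constant pool -- which can never trap and
   whose contents never change -- could be built as follows (a sketch;
   the symbol name "*.LC0" is made up):

     rtx sym = gen_rtx_SYMBOL_REF (Pmode, "*.LC0");
     rtx mem = gen_const_mem (SImode, sym);  */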
576
577/* Generate a MEM referring to fixed portions of the frame, e.g., register
578 save areas. */
579
580rtx
581gen_frame_mem (enum machine_mode mode, rtx addr)
582{
583 rtx mem = gen_rtx_MEM (mode, addr);
584 MEM_NOTRAP_P (mem) = 1;
585 set_mem_alias_set (mem, get_frame_alias_set ());
586 return mem;
587}
588
589/* Generate a MEM referring to a temporary use of the stack, not part
590 of the fixed stack frame. For example, something which is pushed
591 by a target splitter. */
592rtx
593gen_tmp_stack_mem (enum machine_mode mode, rtx addr)
594{
595 rtx mem = gen_rtx_MEM (mode, addr);
596 MEM_NOTRAP_P (mem) = 1;
597 if (!current_function_calls_alloca)
598 set_mem_alias_set (mem, get_frame_alias_set ());
599 return mem;
600}
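
/* For instance, a machine-specific splitter that spills through the top
   of the stack might write (a sketch only):

     rtx slot = gen_tmp_stack_mem (DImode,
                                   gen_rtx_PRE_DEC (Pmode,
                                                    stack_pointer_rtx));  */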
601
602/* We want to create (subreg:OMODE (obj:IMODE) OFFSET). Return true if
603 this construct would be valid, and false otherwise. */
604
605bool
606validate_subreg (enum machine_mode omode, enum machine_mode imode,
607 rtx reg, unsigned int offset)
608{
609 unsigned int isize = GET_MODE_SIZE (imode);
610 unsigned int osize = GET_MODE_SIZE (omode);
611
612 /* All subregs must be aligned. */
613 if (offset % osize != 0)
614 return false;
615
616 /* The subreg offset cannot be outside the inner object. */
617 if (offset >= isize)
618 return false;
619
620 /* ??? This should not be here. Temporarily continue to allow word_mode
621 subregs of anything. The most common offender is (subreg:SI (reg:DF)).
622 Generally, backends are doing something sketchy but it'll take time to
623 fix them all. */
624 if (omode == word_mode)
625 ;
626 /* ??? Similarly, e.g. with (subreg:DF (reg:TI)). Though store_bit_field
627 is the culprit here, and not the backends. */
628 else if (osize >= UNITS_PER_WORD && isize >= osize)
629 ;
630 /* Allow component subregs of complex and vector. Though given the below
631 extraction rules, it's not always clear what that means. */
632 else if ((COMPLEX_MODE_P (imode) || VECTOR_MODE_P (imode))
633 && GET_MODE_INNER (imode) == omode)
634 ;
635 /* ??? x86 sse code makes heavy use of *paradoxical* vector subregs,
636 i.e. (subreg:V4SF (reg:SF) 0). This surely isn't the cleanest way to
637 represent this. It's questionable if this ought to be represented at
638 all -- why can't this all be hidden in post-reload splitters that make
639 arbitrary mode changes to the registers themselves? */
640 else if (VECTOR_MODE_P (omode) && GET_MODE_INNER (omode) == imode)
641 ;
642 /* Subregs involving floating point modes are not allowed to
643 change size. Therefore (subreg:DI (reg:DF) 0) is fine, but
644 (subreg:SI (reg:DF) 0) isn't. */
645 else if (FLOAT_MODE_P (imode) || FLOAT_MODE_P (omode))
646 {
647 if (isize != osize)
648 return false;
649 }
650
651 /* Paradoxical subregs must have offset zero. */
652 if (osize > isize)
653 return offset == 0;
654
655 /* This is a normal subreg. Verify that the offset is representable. */
656
657 /* For hard registers, we already have most of these rules collected in
658 subreg_offset_representable_p. */
659 if (reg && REG_P (reg) && HARD_REGISTER_P (reg))
660 {
661 unsigned int regno = REGNO (reg);
662
663#ifdef CANNOT_CHANGE_MODE_CLASS
664 if ((COMPLEX_MODE_P (imode) || VECTOR_MODE_P (imode))
665 && GET_MODE_INNER (imode) == omode)
666 ;
667 else if (REG_CANNOT_CHANGE_MODE_P (regno, imode, omode))
668 return false;
669#endif
670
671 return subreg_offset_representable_p (regno, imode, offset, omode);
672 }
673
674 /* For pseudo registers, we want most of the same checks. Namely:
675 If the register is no larger than a word, the subreg must be the lowpart.
676 If the register is larger than a word, the subreg must be the lowpart
677 of a subword. A subreg does *not* perform arbitrary bit extraction.
678 Given that we've already checked mode/offset alignment, we only have
679 to check subword subregs here. */
680 if (osize < UNITS_PER_WORD)
681 {
682 enum machine_mode wmode = isize > UNITS_PER_WORD ? word_mode : imode;
683 unsigned int low_off = subreg_lowpart_offset (omode, wmode);
684 if (offset % UNITS_PER_WORD != low_off)
685 return false;
686 }
687 return true;
688}
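
/* Some concrete instances of the rules above, assuming a little-endian
   target with 32-bit words (illustrative only):

     (subreg:SI (reg:DI) 0)     ok: lowpart of a wider integer
     (subreg:DI (reg:DF) 0)     ok: same-size view of the bits
     (subreg:V4SF (reg:SF) 0)   ok: paradoxical vector subreg
     (subreg:HI (reg:DF) 0)     rejected: float mode changing size
     (subreg:QI (reg:SI) 1)     rejected: not the lowpart byte  */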
689
690rtx
691gen_rtx_SUBREG (enum machine_mode mode, rtx reg, int offset)
692{
693 gcc_assert (validate_subreg (mode, GET_MODE (reg), reg, offset));
694 return gen_rtx_raw_SUBREG (mode, reg, offset);
695}
696
697/* Generate a SUBREG representing the least-significant part of REG if MODE
698 is smaller than the mode of REG, otherwise a paradoxical SUBREG. */
699
700rtx
701gen_lowpart_SUBREG (enum machine_mode mode, rtx reg)
702{
703 enum machine_mode inmode;
704
705 inmode = GET_MODE (reg);
706 if (inmode == VOIDmode)
707 inmode = mode;
708 return gen_rtx_SUBREG (mode, reg,
709 subreg_lowpart_offset (mode, inmode));
710}
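
/* E.g. gen_lowpart_SUBREG (QImode, (reg:SI r)) yields
   (subreg:QI (reg:SI r) 0) on a little-endian target and
   (subreg:QI (reg:SI r) 3) on a big-endian one.  */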
711
712/* gen_rtvec (n, [rt1, ..., rtn])
713**
714** This routine creates an rtvec and stores within it the
715** pointers to rtx's which are its arguments.
716*/
717
718/*VARARGS1*/
719rtvec

--- 43 unchanged lines hidden ---

763rtx
764gen_reg_rtx (enum machine_mode mode)
765{
766 struct function *f = cfun;
767 rtx val;
768
769 /* Don't let anything called after initial flow analysis create new
770 registers. */
771 gcc_assert (!no_new_pseudos);
772
773 if (generating_concat_p
774 && (GET_MODE_CLASS (mode) == MODE_COMPLEX_FLOAT
775 || GET_MODE_CLASS (mode) == MODE_COMPLEX_INT))
776 {
777 /* For complex modes, don't make a single pseudo.
778 Instead, make a CONCAT of two pseudos.
779 This allows noncontiguous allocation of the real and imaginary parts,

--- 28 unchanged lines hidden ---

808 f->emit->regno_pointer_align_length = old_size * 2;
809 }
810
811 val = gen_raw_REG (mode, reg_rtx_no);
812 regno_reg_rtx[reg_rtx_no++] = val;
813 return val;
814}
815
816/* Generate a register with the same attributes as REG, but offset by OFFSET.
817 Do the big endian correction if needed. */
818
819rtx
820gen_rtx_REG_offset (rtx reg, enum machine_mode mode, unsigned int regno, int offset)
821{
822 rtx new = gen_rtx_REG (mode, regno);
823 tree decl;
824 HOST_WIDE_INT var_size;
825
826 /* PR middle-end/14084
827 The problem appears when a variable is stored in a larger register
828 and is later used in the original mode, some mode in between,
829 or when some part of the variable is accessed.
830
831 On little endian machines there is no problem because
832 the REG_OFFSET of the start of the variable is the same when
833 accessed in any mode (it is 0).
834
835 However, this is not true on big endian machines.
836 The offset of the start of the variable is different when accessed
837 in different modes.
838 When we are taking a part of the REG we have to change the OFFSET
839 from offset WRT size of mode of REG to offset WRT size of variable.
840
841 If we did not do the big-endian correction, the resulting REG_OFFSET
842 would be larger than the size of the DECL.
843
844 Examples of correction, for BYTES_BIG_ENDIAN WORDS_BIG_ENDIAN machine:
845
846 REG.mode  MODE  DECL size  old offset  new offset  description
847 DI        SI    4          4           0           int32 in SImode
848 DI        SI    1          4           0           char in SImode
849 DI        QI    1          7           0           char in QImode
850 DI        QI    4          5           1           1st element in QImode
851                                                    of char[4]
852 DI        HI    4          6           2           1st element in HImode
853                                                    of int16[2]
854
855 If the size of DECL is equal to or greater than the size of REG
856 we can't do this correction because the register holds the
857 whole variable or a part of the variable and thus the REG_OFFSET
858 is already correct. */
859
860 decl = REG_EXPR (reg);
861 if ((BYTES_BIG_ENDIAN || WORDS_BIG_ENDIAN)
862 && decl != NULL
863 && offset > 0
864 && GET_MODE_SIZE (GET_MODE (reg)) > GET_MODE_SIZE (mode)
865 && ((var_size = int_size_in_bytes (TREE_TYPE (decl))) > 0
866 && var_size < GET_MODE_SIZE (GET_MODE (reg))))
867 {
868 int offset_le;
869
870 /* Convert machine endian to little endian WRT size of mode of REG. */
871 if (WORDS_BIG_ENDIAN)
872 offset_le = ((GET_MODE_SIZE (GET_MODE (reg)) - 1 - offset)
873 / UNITS_PER_WORD) * UNITS_PER_WORD;
874 else
875 offset_le = (offset / UNITS_PER_WORD) * UNITS_PER_WORD;
876
877 if (BYTES_BIG_ENDIAN)
878 offset_le += ((GET_MODE_SIZE (GET_MODE (reg)) - 1 - offset)
879 % UNITS_PER_WORD);
880 else
881 offset_le += offset % UNITS_PER_WORD;
882
883 if (offset_le >= var_size)
884 {
885 /* MODE is wider than the variable so the new reg will cover
886 the whole variable so the resulting OFFSET should be 0. */
887 offset = 0;
888 }
889 else
890 {
891 /* Convert little endian to machine endian WRT size of variable. */
892 if (WORDS_BIG_ENDIAN)
893 offset = ((var_size - 1 - offset_le)
894 / UNITS_PER_WORD) * UNITS_PER_WORD;
895 else
896 offset = (offset_le / UNITS_PER_WORD) * UNITS_PER_WORD;
897
898 if (BYTES_BIG_ENDIAN)
899 offset += ((var_size - 1 - offset_le)
900 % UNITS_PER_WORD);
901 else
902 offset += offset_le % UNITS_PER_WORD;
903 }
904 }
905
906 REG_ATTRS (new) = get_reg_attrs (REG_EXPR (reg),
907 REG_OFFSET (reg) + offset);
908 return new;
909}
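
/* A worked instance of the correction above, for the "DI QI 4 5 1" row
   of the table (BYTES_BIG_ENDIAN && WORDS_BIG_ENDIAN, UNITS_PER_WORD
   of 8 assumed):

     offset_le  = ((8 - 1 - 5) / 8) * 8  = 0
     offset_le += (8 - 1 - 5) % 8        = 2   (little-endian byte 2)
     offset     = ((4 - 1 - 2) / 8) * 8  = 0
     offset    += (4 - 1 - 2) % 8        = 1   (byte 1 of the char[4])  */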
910
911/* Set the decl for MEM to DECL. */
912
913void

--- 5 unchanged lines hidden (view full) ---

919}
920
921/* Set the register attributes for registers contained in PARM_RTX.
922 Use needed values from memory attributes of MEM. */
923
924void
925set_reg_attrs_for_parm (rtx parm_rtx, rtx mem)
926{
927 if (REG_P (parm_rtx))
928 set_reg_attrs_from_mem (parm_rtx, mem);
929 else if (GET_CODE (parm_rtx) == PARALLEL)
930 {
931 /* Check for a NULL entry in the first slot, used to indicate that the
932 parameter goes both on the stack and in registers. */
933 int i = XEXP (XVECEXP (parm_rtx, 0, 0), 0) ? 0 : 1;
934 for (; i < XVECLEN (parm_rtx, 0); i++)
935 {
936 rtx x = XVECEXP (parm_rtx, 0, i);
937 if (REG_P (XEXP (x, 0)))
938 REG_ATTRS (XEXP (x, 0))
939 = get_reg_attrs (MEM_EXPR (mem),
940 INTVAL (XEXP (x, 1)));
941 }
942 }
943}
944
945/* Assign the RTX X to declaration T. */
946void
947set_decl_rtl (tree t, rtx x)
948{
949 DECL_WRTL_CHECK (t)->decl_with_rtl.rtl = x;
950
951 if (!x)
952 return;
953 /* For a register, we maintain the reverse information too. */
954 if (REG_P (x))
955 REG_ATTRS (x) = get_reg_attrs (t, 0);
956 else if (GET_CODE (x) == SUBREG)
957 REG_ATTRS (SUBREG_REG (x))
958 = get_reg_attrs (t, -SUBREG_BYTE (x));
959 if (GET_CODE (x) == CONCAT)
960 {
961 if (REG_P (XEXP (x, 0)))
962 REG_ATTRS (XEXP (x, 0)) = get_reg_attrs (t, 0);

--- 8 unchanged lines hidden ---

971 {
972 rtx y = XVECEXP (x, 0, i);
973 if (REG_P (XEXP (y, 0)))
974 REG_ATTRS (XEXP (y, 0)) = get_reg_attrs (t, INTVAL (XEXP (y, 1)));
975 }
976 }
977}
978
979/* Assign the RTX X to parameter declaration T. */
980void
981set_decl_incoming_rtl (tree t, rtx x)
982{
983 DECL_INCOMING_RTL (t) = x;
984
985 if (!x)
986 return;
987 /* For a register, we maintain the reverse information too. */
988 if (REG_P (x))
989 REG_ATTRS (x) = get_reg_attrs (t, 0);
990 else if (GET_CODE (x) == SUBREG)
991 REG_ATTRS (SUBREG_REG (x))
992 = get_reg_attrs (t, -SUBREG_BYTE (x));
993 if (GET_CODE (x) == CONCAT)
994 {
995 if (REG_P (XEXP (x, 0)))
996 REG_ATTRS (XEXP (x, 0)) = get_reg_attrs (t, 0);
997 if (REG_P (XEXP (x, 1)))
998 REG_ATTRS (XEXP (x, 1))
999 = get_reg_attrs (t, GET_MODE_UNIT_SIZE (GET_MODE (XEXP (x, 0))));
1000 }
1001 if (GET_CODE (x) == PARALLEL)
1002 {
1003 int i, start;
1004
1005 /* Check for a NULL entry, used to indicate that the parameter goes
1006 both on the stack and in registers. */
1007 if (XEXP (XVECEXP (x, 0, 0), 0))
1008 start = 0;
1009 else
1010 start = 1;
1011
1012 for (i = start; i < XVECLEN (x, 0); i++)
1013 {
1014 rtx y = XVECEXP (x, 0, i);
1015 if (REG_P (XEXP (y, 0)))
1016 REG_ATTRS (XEXP (y, 0)) = get_reg_attrs (t, INTVAL (XEXP (y, 1)));
1017 }
1018 }
1019}
1020
1021/* Identify REG (which may be a CONCAT) as a user register. */
1022
1023void
1024mark_user_reg (rtx reg)
1025{
1026 if (GET_CODE (reg) == CONCAT)
1027 {
1028 REG_USERVAR_P (XEXP (reg, 0)) = 1;
1029 REG_USERVAR_P (XEXP (reg, 1)) = 1;
1030 }
1031 else
1032 {
1033 gcc_assert (REG_P (reg));
1034 REG_USERVAR_P (reg) = 1;
1035 }
1036}
1037
1038/* Identify REG as a probable pointer register and show its alignment
1039 as ALIGN, if nonzero. */
1040
1041void
1042mark_reg_pointer (rtx reg, int align)
1043{

--- 17 unchanged lines hidden ---

1061 return reg_rtx_no;
1062}
1063
1064/* Return 1 + the largest label number used so far in the current function. */
1065
1066int
1067max_label_num (void)
1068{
1069 return label_num;
1070}
1071
1072/* Return first label number used in this function (if any were used). */
1073
1074int
1075get_first_label_num (void)
1076{
1077 return first_label_num;
1078}
1079
1080/* If the rtx for a label was created during the expansion of a nested
1081 function, then first_label_num won't include this label number.
1082 Fix this now so that array indices work later. */
1083
1084void
1085maybe_set_first_label_num (rtx x)
1086{
1087 if (CODE_LABEL_NUMBER (x) < first_label_num)
1088 first_label_num = CODE_LABEL_NUMBER (x);
1089}
1090
1091/* Return a value representing some low-order bits of X, where the number
1092 of low-order bits is given by MODE. Note that no conversion is done
1093 between floating-point and fixed-point values, rather, the bit
1094 representation is returned.
1095
1096 This function handles the cases in common between gen_lowpart, below,
1097 and two variants in cse.c and combine.c. These are the cases that can
1098 be safely handled at all points in the compilation.

--- 6 unchanged lines hidden ---

1105 int msize = GET_MODE_SIZE (mode);
1106 int xsize;
1107 int offset = 0;
1108 enum machine_mode innermode;
1109
1110 /* Unfortunately, this routine doesn't take a parameter for the mode of X,
1111 so we have to make one up. Yuk. */
1112 innermode = GET_MODE (x);
1113 if (GET_CODE (x) == CONST_INT
1114 && msize * BITS_PER_UNIT <= HOST_BITS_PER_WIDE_INT)
1115 innermode = mode_for_size (HOST_BITS_PER_WIDE_INT, MODE_INT, 0);
1116 else if (innermode == VOIDmode)
1117 innermode = mode_for_size (HOST_BITS_PER_WIDE_INT * 2, MODE_INT, 0);
1118
1119 xsize = GET_MODE_SIZE (innermode);
1120
1121 gcc_assert (innermode != VOIDmode && innermode != BLKmode);
1122
1123 if (innermode == mode)
1124 return x;
1125
1126 /* MODE must occupy no more words than the mode of X. */
1127 if ((msize + (UNITS_PER_WORD - 1)) / UNITS_PER_WORD
1128 > ((xsize + (UNITS_PER_WORD - 1)) / UNITS_PER_WORD))
1129 return 0;
1130
1131 /* Don't allow generating paradoxical FLOAT_MODE subregs. */
1132 if (SCALAR_FLOAT_MODE_P (mode) && msize > xsize)
1133 return 0;
1134
1135 offset = subreg_lowpart_offset (mode, innermode);
1136
1137 if ((GET_CODE (x) == ZERO_EXTEND || GET_CODE (x) == SIGN_EXTEND)
1138 && (GET_MODE_CLASS (mode) == MODE_INT
1139 || GET_MODE_CLASS (mode) == MODE_PARTIAL_INT))
1140 {

--- 7 unchanged lines hidden ---

1148
1149 if (GET_MODE (XEXP (x, 0)) == mode)
1150 return XEXP (x, 0);
1151 else if (msize < GET_MODE_SIZE (GET_MODE (XEXP (x, 0))))
1152 return gen_lowpart_common (mode, XEXP (x, 0));
1153 else if (msize < xsize)
1154 return gen_rtx_fmt_e (GET_CODE (x), mode, XEXP (x, 0));
1155 }
1156 else if (GET_CODE (x) == SUBREG || REG_P (x)
1157 || GET_CODE (x) == CONCAT || GET_CODE (x) == CONST_VECTOR
1158 || GET_CODE (x) == CONST_DOUBLE || GET_CODE (x) == CONST_INT)
1159 return simplify_gen_subreg (mode, x, innermode, offset);
1160
1161 /* Otherwise, we can't do this. */
1162 return 0;
1163}
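
/* A few sample results, for pseudo registers on a little-endian target
   (a sketch):

     gen_lowpart_common (QImode, (reg:SI r))
       => (subreg:QI (reg:SI r) 0)
     gen_lowpart_common (HImode, (zero_extend:SI (reg:HI r)))
       => (reg:HI r)
     gen_lowpart_common (DImode, (reg:SI r))
       => 0, since MODE needs more words than X occupies.  */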
1164
1165rtx
1166gen_highpart (enum machine_mode mode, rtx x)
1167{
1168 unsigned int msize = GET_MODE_SIZE (mode);
1169 rtx result;
1170
1171 /* This case loses if X is a subreg. To catch bugs early,
1172 complain if an invalid MODE is used even in other cases. */
1173 gcc_assert (msize <= UNITS_PER_WORD
1174 || msize == (unsigned int) GET_MODE_UNIT_SIZE (GET_MODE (x)));
1175
1176 result = simplify_gen_subreg (mode, x, GET_MODE (x),
1177 subreg_highpart_offset (mode, GET_MODE (x)));
1178 gcc_assert (result);
1179
1180 /* simplify_gen_subreg is not guaranteed to return a valid operand for
1181 the target if we have a MEM. gen_highpart must return a valid operand,
1182 emitting code if necessary to do so. */
1183 if (MEM_P (result))
1184 {
1185 result = validize_mem (result);
1186 gcc_assert (result);
1187 }
1188
1189 return result;
1190}
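
/* For example, on a 32-bit little-endian target,
   gen_highpart (SImode, (reg:DI r)) => (subreg:SI (reg:DI r) 4);
   on a big-endian target the high word lives at offset 0 instead.  */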
1191
1192/* Like gen_highpart, but accept mode of EXP operand in case EXP can
1193 be a VOIDmode constant. */
1194rtx
1195gen_highpart_mode (enum machine_mode outermode, enum machine_mode innermode, rtx exp)
1196{
1197 if (GET_MODE (exp) != VOIDmode)
1198 {
1199 gcc_assert (GET_MODE (exp) == innermode);
1200 return gen_highpart (outermode, exp);
1201 }
1202 return simplify_gen_subreg (outermode, exp, innermode,
1203 subreg_highpart_offset (outermode, innermode));
1204}
1205
1206/* Return offset in bytes to get OUTERMODE low part
1207 of the value in mode INNERMODE stored in memory in target format. */

--- 18 unchanged lines hidden ---

1226/* Return offset in bytes to get OUTERMODE high part
1227 of the value in mode INNERMODE stored in memory in target format. */
1228unsigned int
1229subreg_highpart_offset (enum machine_mode outermode, enum machine_mode innermode)
1230{
1231 unsigned int offset = 0;
1232 int difference = (GET_MODE_SIZE (innermode) - GET_MODE_SIZE (outermode));
1233
1234 gcc_assert (GET_MODE_SIZE (innermode) >= GET_MODE_SIZE (outermode));
1235
1236 if (difference > 0)
1237 {
1238 if (! WORDS_BIG_ENDIAN)
1239 offset += (difference / UNITS_PER_WORD) * UNITS_PER_WORD;
1240 if (! BYTES_BIG_ENDIAN)
1241 offset += difference % UNITS_PER_WORD;
1242 }

--- 43 unchanged lines hidden ---

1286 */
1287
1288rtx
1289operand_subword (rtx op, unsigned int offset, int validate_address, enum machine_mode mode)
1290{
1291 if (mode == VOIDmode)
1292 mode = GET_MODE (op);
1293
1294 gcc_assert (mode != VOIDmode);
1295
1296 /* If OP is narrower than a word, fail. */
1297 if (mode != BLKmode
1298 && (GET_MODE_SIZE (mode) < UNITS_PER_WORD))
1299 return 0;
1300
1301 /* If we want a word outside OP, return zero. */
1302 if (mode != BLKmode
1303 && (offset + 1) * UNITS_PER_WORD > GET_MODE_SIZE (mode))
1304 return const0_rtx;
1305
1306 /* Form a new MEM at the requested address. */
1307 if (MEM_P (op))
1308 {
1309 rtx new = adjust_address_nv (op, word_mode, offset * UNITS_PER_WORD);
1310
1311 if (! validate_address)
1312 return new;
1313
1314 else if (reload_completed)
1315 {
1316 if (! strict_memory_address_p (word_mode, XEXP (new, 0)))
1317 return 0;
1318 }
1319 else
1320 return replace_equiv_address (new, XEXP (new, 0));
1321 }
1322
1323 /* Rest can be handled by simplify_subreg. */
1324 return simplify_gen_subreg (word_mode, op, mode, (offset * UNITS_PER_WORD));
1325}
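
/* Example, on a 32-bit little-endian target (word_mode == SImode):

     operand_subword ((reg:DI r), 0, 0, DImode)
       => (subreg:SI (reg:DI r) 0)   -- least significant word
     operand_subword ((reg:DI r), 1, 0, DImode)
       => (subreg:SI (reg:DI r) 4)   -- most significant word  */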
1326
1327/* Similar to `operand_subword', but never return 0. If we can't
1328 extract the required subword, put OP into a register and try again.
1329 The second attempt must succeed. We always validate the address in
1330 this case.
1331
1332 MODE is the mode of OP, in case it is CONST_INT. */
1333
1334rtx
1335operand_subword_force (rtx op, unsigned int offset, enum machine_mode mode)
1336{
1337 rtx result = operand_subword (op, offset, 1, mode);
1338
1339 if (result)
1340 return result;
1341
1342 if (mode != BLKmode && mode != VOIDmode)
1343 {
1344 /* If this is a register which cannot be accessed by words, copy it
1345 to a pseudo register. */
1346 if (REG_P (op))
1347 op = copy_to_reg (op);
1348 else
1349 op = force_reg (mode, op);
1350 }
1351
1352 result = operand_subword (op, offset, 1, mode);
1353 gcc_assert (result);
1354
1355 return result;
1356}
1357
1358/* Within a MEM_EXPR, we care about either (1) a component ref of a decl,
1359 or (2) a component ref of something variable. Represent the latter with
1360 a NULL expression. */
1361
1362static tree
1363component_ref_for_mem_expr (tree ref)
1364{
1365 tree inner = TREE_OPERAND (ref, 0);
1366
1367 if (TREE_CODE (inner) == COMPONENT_REF)
1368 inner = component_ref_for_mem_expr (inner);
1369 else
1370 {
1371 /* Now remove any conversions: they don't change what the underlying
1372 object is. Likewise for SAVE_EXPR. */
1373 while (TREE_CODE (inner) == NOP_EXPR || TREE_CODE (inner) == CONVERT_EXPR
1374 || TREE_CODE (inner) == NON_LVALUE_EXPR
1375 || TREE_CODE (inner) == VIEW_CONVERT_EXPR
1376 || TREE_CODE (inner) == SAVE_EXPR)
1377 inner = TREE_OPERAND (inner, 0);
1378
1379 if (! DECL_P (inner))
1380 inner = NULL_TREE;
1381 }
1382
1383 if (inner == TREE_OPERAND (ref, 0))
1384 return ref;
1385 else
1386 return build3 (COMPONENT_REF, TREE_TYPE (ref), inner,
1387 TREE_OPERAND (ref, 1), NULL_TREE);
1388}
1389
1390/* Returns 1 if the two MEM_EXPRs can be considered equal
1391 and 0 otherwise. */
1392
1393int
1394mem_expr_equal_p (tree expr1, tree expr2)
1395{

--- 8 unchanged lines hidden (view full) ---

1404
1405 if (TREE_CODE (expr1) == COMPONENT_REF)
1406 return
1407 mem_expr_equal_p (TREE_OPERAND (expr1, 0),
1408 TREE_OPERAND (expr2, 0))
1409 && mem_expr_equal_p (TREE_OPERAND (expr1, 1), /* field decl */
1410 TREE_OPERAND (expr2, 1));
1411
1412 if (INDIRECT_REF_P (expr1))
1413 return mem_expr_equal_p (TREE_OPERAND (expr1, 0),
1414 TREE_OPERAND (expr2, 0));
1415
1416 /* ARRAY_REFs, ARRAY_RANGE_REFs and BIT_FIELD_REFs should already
1417 have been resolved here. */
1418 gcc_assert (DECL_P (expr1));
1419
1420 /* Decls with different pointers can't be equal. */
1421 return 0;
1422}
1423
1424/* Given REF, a MEM, and T, either the type of REF or the expression
1425 corresponding to REF, set the memory attributes. OBJECTP is nonzero
1426 if we are making a new object of this type. BITPOS is nonzero if
1427 there is an offset outstanding on T that will be applied later. */
1428
1429void

--- 17 unchanged lines hidden ---

1447 type = TYPE_P (t) ? t : TREE_TYPE (t);
1448 if (type == error_mark_node)
1449 return;
1450
1451 /* If we have already set DECL_RTL = ref, get_alias_set will get the
1452 wrong answer, as it assumes that DECL_RTL already has the right alias
1453 info. Callers should not set DECL_RTL until after the call to
1454 set_mem_attributes. */
1455 gcc_assert (!DECL_P (t) || ref != DECL_RTL_IF_SET (t));
1456
1457 /* Get the alias set from the expression or type (perhaps using a
1458 front-end routine) and use it. */
1459 alias = get_alias_set (t);
1460
1461 MEM_VOLATILE_P (ref) |= TYPE_VOLATILE (type);
1462 MEM_IN_STRUCT_P (ref) = AGGREGATE_TYPE_P (type);
1463 MEM_POINTER (ref) = POINTER_TYPE_P (type);
1464
1465 /* If we are making an object of this type, or if this is a DECL, we know
1466 that it is a scalar if the type is not an aggregate. */
1467 if ((objectp || DECL_P (t)) && ! AGGREGATE_TYPE_P (type))
1468 MEM_SCALAR_P (ref) = 1;
1469
1470 /* We can set the alignment from the type if we are making an object,
1471 this is an INDIRECT_REF, or if TYPE_ALIGN_OK. */
1472 if (objectp || TREE_CODE (t) == INDIRECT_REF
1473 || TREE_CODE (t) == ALIGN_INDIRECT_REF
1474 || TYPE_ALIGN_OK (type))
1475 align = MAX (align, TYPE_ALIGN (type));
1476 else
1477 if (TREE_CODE (t) == MISALIGNED_INDIRECT_REF)
1478 {
1479 if (integer_zerop (TREE_OPERAND (t, 1)))
1480 /* We don't know anything about the alignment. */
1481 align = BITS_PER_UNIT;
1482 else
1483 align = tree_low_cst (TREE_OPERAND (t, 1), 1);
1484 }
1485
1486 /* If the size is known, we can set that. */
1487 if (TYPE_SIZE_UNIT (type) && host_integerp (TYPE_SIZE_UNIT (type), 1))
1488 size = GEN_INT (tree_low_cst (TYPE_SIZE_UNIT (type), 1));
1489
1490 /* If T is not a type, we may be able to deduce some more information about
1491 the expression. */
1492 if (! TYPE_P (t))
1493 {
1494 tree base;
1495
1496 if (TREE_THIS_VOLATILE (t))
1497 MEM_VOLATILE_P (ref) = 1;
1498
1499 /* Now remove any conversions: they don't change what the underlying
1500 object is. Likewise for SAVE_EXPR. */
1501 while (TREE_CODE (t) == NOP_EXPR || TREE_CODE (t) == CONVERT_EXPR
1502 || TREE_CODE (t) == NON_LVALUE_EXPR
1503 || TREE_CODE (t) == VIEW_CONVERT_EXPR
1504 || TREE_CODE (t) == SAVE_EXPR)
1505 t = TREE_OPERAND (t, 0);
1506
1507 /* We may look through structure-like accesses for the purposes of
1508 examining TREE_THIS_NOTRAP, but not array-like accesses. */
1509 base = t;
1510 while (TREE_CODE (base) == COMPONENT_REF
1511 || TREE_CODE (base) == REALPART_EXPR
1512 || TREE_CODE (base) == IMAGPART_EXPR
1513 || TREE_CODE (base) == BIT_FIELD_REF)
1514 base = TREE_OPERAND (base, 0);
1515
1516 if (DECL_P (base))
1517 {
1518 if (CODE_CONTAINS_STRUCT (TREE_CODE (base), TS_DECL_WITH_VIS))
1519 MEM_NOTRAP_P (ref) = !DECL_WEAK (base);
1520 else
1521 MEM_NOTRAP_P (ref) = 1;
1522 }
1523 else
1524 MEM_NOTRAP_P (ref) = TREE_THIS_NOTRAP (base);
1525
1526 base = get_base_address (base);
1527 if (base && DECL_P (base)
1528 && TREE_READONLY (base)
1529 && (TREE_STATIC (base) || DECL_EXTERNAL (base)))
1530 {
1531 tree base_type = TREE_TYPE (base);
1532 gcc_assert (!(base_type && TYPE_NEEDS_CONSTRUCTING (base_type))
1533 || DECL_ARTIFICIAL (base));
1534 MEM_READONLY_P (ref) = 1;
1535 }
1536
1537 /* If this expression uses its parent's alias set, mark it such
1538 that we won't change it. */
1539 if (component_uses_parent_alias_set (t))
1540 MEM_KEEP_ALIAS_SET_P (ref) = 1;
1541
1542 /* If this is a decl, set the attributes of the MEM from it. */
1543 if (DECL_P (t))
1544 {
1545 expr = t;
1546 offset = const0_rtx;
1547 apply_bitpos = bitpos;
1548 size = (DECL_SIZE_UNIT (t)
1549 && host_integerp (DECL_SIZE_UNIT (t), 1)
1550 ? GEN_INT (tree_low_cst (DECL_SIZE_UNIT (t), 1)) : 0);
1551 align = DECL_ALIGN (t);
1552 }
1553
1554 /* If this is a constant, we know the alignment. */
1555 else if (CONSTANT_CLASS_P (t))
1556 {
1557 align = TYPE_ALIGN (type);
1558#ifdef CONSTANT_ALIGNMENT
1559 align = CONSTANT_ALIGNMENT (t, align);
1560#endif
1561 }
1562
1563 /* If this is a field reference and not a bit-field, record it. */

--- 16 unchanged lines hidden ---

1580 tree off_tree = size_zero_node;
1581 /* We can't modify t, because we use it at the end of the
1582 function. */
1583 tree t2 = t;
1584
1585 do
1586 {
1587 tree index = TREE_OPERAND (t2, 1);
1588 tree low_bound = array_ref_low_bound (t2);
1589 tree unit_size = array_ref_element_size (t2);
1590
1591 /* We assume all arrays have sizes that are a multiple of a byte.
1592 First subtract the lower bound, if any, in the type of the
1593 index, then convert to sizetype and multiply by the size of
1594 the array element. */
1595 if (! integer_zerop (low_bound))
1596 index = fold_build2 (MINUS_EXPR, TREE_TYPE (index),
1597 index, low_bound);
1598
1599 off_tree = size_binop (PLUS_EXPR,
1600 size_binop (MULT_EXPR,
1601 fold_convert (sizetype,
1602 index),
1603 unit_size),
1604 off_tree);
1605 t2 = TREE_OPERAND (t2, 0);
1606 }
1607 while (TREE_CODE (t2) == ARRAY_REF);
1608
1609 if (DECL_P (t2))
1610 {
1611 expr = t2;
1612 offset = NULL;

--- 15 unchanged lines hidden (view full) ---

1628 {
1629 offset = GEN_INT (tree_low_cst (off_tree, 1));
1630 apply_bitpos = bitpos;
1631 }
1632 /* ??? Any reason the field size would be different than
1633 the size we got from the type? */
1634 }
1635 else if (flag_argument_noalias > 1
1636 && (INDIRECT_REF_P (t2))
1637 && TREE_CODE (TREE_OPERAND (t2, 0)) == PARM_DECL)
1638 {
1639 expr = t2;
1640 offset = NULL;
1641 }
1642 }
1643
1644 /* If this is a Fortran indirect argument reference, record the
1645 parameter decl. */
1646 else if (flag_argument_noalias > 1
1647 && (INDIRECT_REF_P (t))
1648 && TREE_CODE (TREE_OPERAND (t, 0)) == PARM_DECL)
1649 {
1650 expr = t;
1651 offset = NULL;
1652 }
1653 }
1654
1655 /* If we modified OFFSET based on T, then subtract the outstanding
1656 bit position offset. Similarly, increase the size of the accessed
1657 object to contain the negative offset. */
1658 if (apply_bitpos)
1659 {
1660 offset = plus_constant (offset, -(apply_bitpos / BITS_PER_UNIT));
1661 if (size)
1662 size = plus_constant (size, apply_bitpos / BITS_PER_UNIT);
1663 }
1664
1665 if (TREE_CODE (t) == ALIGN_INDIRECT_REF)
1666 {
1667 /* Force EXPR and OFFSET to NULL, since we don't know exactly what
1668 we're overlapping. */
1669 offset = NULL;
1670 expr = NULL;
1671 }
1672
1673 /* Now set the attributes we computed above. */
1674 MEM_ATTRS (ref)
1675 = get_mem_attrs (alias, expr, offset, size, align, GET_MODE (ref));
1676
1677 /* If this is already known to be a scalar or aggregate, we are done. */
1678 if (MEM_IN_STRUCT_P (ref) || MEM_SCALAR_P (ref))
1679 return;
1680

--- 24 unchanged lines hidden (view full) ---

1705
1706/* Set the alias set of MEM to SET. */
1707
1708void
1709set_mem_alias_set (rtx mem, HOST_WIDE_INT set)
1710{
1711#ifdef ENABLE_CHECKING
1712 /* If the new and old alias sets don't conflict, something is wrong. */
1713 gcc_assert (alias_sets_conflict_p (set, MEM_ALIAS_SET (mem)));
1714#endif
1715
1716 MEM_ATTRS (mem) = get_mem_attrs (set, MEM_EXPR (mem), MEM_OFFSET (mem),
1717 MEM_SIZE (mem), MEM_ALIGN (mem),
1718 GET_MODE (mem));
1719}
1720
1721/* Set the alignment of MEM to ALIGN bits. */

--- 42 unchanged lines hidden ---

1764 returned memory location is required to be valid. The memory
1765 attributes are not changed. */
1766
1767static rtx
1768change_address_1 (rtx memref, enum machine_mode mode, rtx addr, int validate)
1769{
1770 rtx new;
1771
1772 gcc_assert (MEM_P (memref));
1773 if (mode == VOIDmode)
1774 mode = GET_MODE (memref);
1775 if (addr == 0)
1776 addr = XEXP (memref, 0);
1777 if (mode == GET_MODE (memref) && addr == XEXP (memref, 0)
1778 && (!validate || memory_address_p (mode, addr)))
1779 return memref;
1780
1781 if (validate)
1782 {
1783 if (reload_in_progress || reload_completed)
1784 gcc_assert (memory_address_p (mode, addr));
1785 else
1786 addr = memory_address (mode, addr);
1787 }
1788
1789 if (rtx_equal_p (addr, XEXP (memref, 0)) && mode == GET_MODE (memref))
1790 return memref;
1791
1792 new = gen_rtx_MEM (mode, addr);

--- 202 unchanged lines hidden ---

1995 if (! memoffset)
1996 expr = NULL_TREE;
1997
1998 while (expr)
1999 {
2000 if (TREE_CODE (expr) == COMPONENT_REF)
2001 {
2002 tree field = TREE_OPERAND (expr, 1);
2003 tree offset = component_ref_field_offset (expr);
2004
2005 if (! DECL_SIZE_UNIT (field))
2006 {
2007 expr = NULL_TREE;
2008 break;
2009 }
2010
2011 /* Is the field at least as large as the access? If so, ok,
2012 otherwise strip back to the containing structure. */
2013 if (TREE_CODE (DECL_SIZE_UNIT (field)) == INTEGER_CST
2014 && compare_tree_int (DECL_SIZE_UNIT (field), size) >= 0
2015 && INTVAL (memoffset) >= 0)
2016 break;
2017
2018 if (! host_integerp (offset, 1))
2019 {
2020 expr = NULL_TREE;
2021 break;
2022 }
2023
2024 expr = TREE_OPERAND (expr, 0);
2025 memoffset
2026 = (GEN_INT (INTVAL (memoffset)
2027 + tree_low_cst (offset, 1)
2028 + (tree_low_cst (DECL_FIELD_BIT_OFFSET (field), 1)
2029 / BITS_PER_UNIT)));
2030 }
2031 /* Similarly for the decl. */
2032 else if (DECL_P (expr)
2033 && DECL_SIZE_UNIT (expr)
2034 && TREE_CODE (DECL_SIZE_UNIT (expr)) == INTEGER_CST
2035 && compare_tree_int (DECL_SIZE_UNIT (expr), size) >= 0
2036 && (! memoffset || INTVAL (memoffset) >= 0))
2037 break;

--- 42 unchanged lines hidden (view full) ---

2080 last_insn = last;
2081 cur_insn_uid = 0;
2082
2083 for (insn = first; insn; insn = NEXT_INSN (insn))
2084 cur_insn_uid = MAX (cur_insn_uid, INSN_UID (insn));
2085
2086 cur_insn_uid++;
2087}
2088
2089/* Go through all the RTL insn bodies and copy any invalid shared
2090 structure. This routine should only be called once. */
2091
2092static void
2093unshare_all_rtl_1 (tree fndecl, rtx insn)
2094{
2095 tree decl;
2096
2097 /* Make sure that virtual parameters are not shared. */
2098 for (decl = DECL_ARGUMENTS (fndecl); decl; decl = TREE_CHAIN (decl))
2099 SET_DECL_RTL (decl, copy_rtx_if_shared (DECL_RTL (decl)));
2100
2101 /* Make sure that virtual stack slots are not shared. */

--- 34 unchanged lines hidden (view full) ---

2136 reset_used_decls (DECL_INITIAL (cfun->decl));
2137
2138 /* Make sure that virtual parameters are not shared. */
2139 for (decl = DECL_ARGUMENTS (cfun->decl); decl; decl = TREE_CHAIN (decl))
2140 reset_used_flags (DECL_RTL (decl));
2141
2142 reset_used_flags (stack_slot_list);
2143
2144 unshare_all_rtl_1 (cfun->decl, insn);
2145}
2146
2147unsigned int
2148unshare_all_rtl (void)
2149{
2150 unshare_all_rtl_1 (current_function_decl, get_insns ());
2151 return 0;
2152}
2153
2154struct tree_opt_pass pass_unshare_all_rtl =
2155{
2156 "unshare", /* name */
2157 NULL, /* gate */
2158 unshare_all_rtl, /* execute */
2159 NULL, /* sub */
2160 NULL, /* next */
2161 0, /* static_pass_number */
2162 0, /* tv_id */
2163 0, /* properties_required */
2164 0, /* properties_provided */
2165 0, /* properties_destroyed */
2166 0, /* todo_flags_start */
2167 TODO_dump_func, /* todo_flags_finish */
2168 0 /* letter */
2169};
2170
2171
2172/* Check that ORIG is not marked when it should not be, and mark ORIG as
2173 in use. Recursively does the same for subexpressions. */
2174
2175static void
2176verify_rtx_sharing (rtx orig, rtx insn)
2177{
2178 rtx x = orig;
2179 int i;

--- 5 unchanged lines hidden (view full) ---

2185
2186 code = GET_CODE (x);
2187
2188 /* These types may be freely shared. */
2189
2190 switch (code)
2191 {
2192 case REG:
2193 case CONST_INT:
2194 case CONST_DOUBLE:
2195 case CONST_VECTOR:
2196 case SYMBOL_REF:
2197 case LABEL_REF:
2198 case CODE_LABEL:
2199 case PC:
2200 case CC0:
2201 case SCRATCH:
2202 return;
2203 /* SCRATCH must be shared because each one represents a distinct value. */
2204 case CLOBBER:
2205 if (REG_P (XEXP (x, 0)) && REGNO (XEXP (x, 0)) < FIRST_PSEUDO_REGISTER)
2206 return;
2207 break;
2208
2209 case CONST:
2210 /* CONST can be shared if it contains a SYMBOL_REF. If it contains
2211 a LABEL_REF, it isn't sharable. */
2212 if (GET_CODE (XEXP (x, 0)) == PLUS
2213 && GET_CODE (XEXP (XEXP (x, 0), 0)) == SYMBOL_REF
2214 && GET_CODE (XEXP (XEXP (x, 0), 1)) == CONST_INT)
2215 return;

--- 8 unchanged lines hidden (view full) ---

2224 break;
2225
2226 default:
2227 break;
2228 }
2229
2230 /* This rtx may not be shared. If it has already been seen,
2231 report invalid rtl sharing. */
2232#ifdef ENABLE_CHECKING
2233 if (RTX_FLAG (x, used))
2234 {
2235 error ("invalid rtl sharing found in the insn");
2236 debug_rtx (insn);
2237 error ("shared rtx");
2238 debug_rtx (x);
2239 internal_error ("internal consistency failure");
2240 }
2241#endif
2242 gcc_assert (!RTX_FLAG (x, used));
2243
2244 RTX_FLAG (x, used) = 1;
2245
2246 /* Now scan the subexpressions recursively. */
2247
2248 format_ptr = GET_RTX_FORMAT (code);
2249
2250 for (i = 0; i < GET_RTX_LENGTH (code); i++)
2251 {

--- 6 unchanged lines hidden (view full) ---

2258 case 'E':
2259 if (XVEC (x, i) != NULL)
2260 {
2261 int j;
2262 int len = XVECLEN (x, i);
2263
2264 for (j = 0; j < len; j++)
2265 {
2266 /* We allow sharing of ASM_OPERANDS inside a single
2267 instruction. */
2268 if (j && GET_CODE (XVECEXP (x, i, j)) == SET
2269 && (GET_CODE (SET_SRC (XVECEXP (x, i, j)))
2270 == ASM_OPERANDS))
2271 verify_rtx_sharing (SET_DEST (XVECEXP (x, i, j)), insn);
2272 else
2273 verify_rtx_sharing (XVECEXP (x, i, j), insn);
2274 }
2275 }
2276 break;
2277 }
2278 }

--- 69 unchanged lines hidden (view full) ---

2348 if (DECL_RTL_SET_P (t))
2349 reset_used_flags (DECL_RTL (t));
2350
2351 /* Now process sub-blocks. */
2352 for (t = BLOCK_SUBBLOCKS (blk); t; t = TREE_CHAIN (t))
2353 reset_used_decls (t);
2354}
2355
2356/* Mark ORIG as in use, and return a copy of it if it was already in use.
2357 Recursively does the same for subexpressions. Uses
2358 copy_rtx_if_shared_1 to reduce stack space. */
2359
2360rtx
2361copy_rtx_if_shared (rtx orig)
2362{
2363 copy_rtx_if_shared_1 (&orig);

--- 23 unchanged lines hidden (view full) ---

2387
2388 code = GET_CODE (x);
2389
2390 /* These types may be freely shared. */
2391
2392 switch (code)
2393 {
2394 case REG:
2395 case CONST_INT:
2396 case CONST_DOUBLE:
2397 case CONST_VECTOR:
2398 case SYMBOL_REF:
2399 case LABEL_REF:
2400 case CODE_LABEL:
2401 case PC:
2402 case CC0:
2403 case SCRATCH:
2404 /* SCRATCH must be shared because each one represents a distinct value. */
2405 return;
2406 case CLOBBER:
2407 if (REG_P (XEXP (x, 0)) && REGNO (XEXP (x, 0)) < FIRST_PSEUDO_REGISTER)
2408 return;
2409 break;
2410
2411 case CONST:
2412 /* CONST can be shared if it contains a SYMBOL_REF. If it contains
2413 a LABEL_REF, it isn't sharable. */
2414 if (GET_CODE (XEXP (x, 0)) == PLUS
2415 && GET_CODE (XEXP (XEXP (x, 0), 0)) == SYMBOL_REF
2416 && GET_CODE (XEXP (XEXP (x, 0), 1)) == CONST_INT)
2417 return;

--- 11 unchanged lines hidden ---

2429 break;
2430 }
2431
2432 /* This rtx may not be shared. If it has already been seen,
2433 replace it with a copy of itself. */
2434
2435 if (RTX_FLAG (x, used))
2436 {
2437 x = shallow_copy_rtx (x);
2438 copied = 1;
2439 }
2440 RTX_FLAG (x, used) = 1;
2441
2442 /* Now scan the subexpressions recursively.
2443 We can store any replaced subexpressions directly into X
2444 since we know X is not shared! Any vectors in X
2445 must be copied if X was copied. */

--- 62 unchanged lines hidden ---

2508 code = GET_CODE (x);
2509
2510 /* These types may be freely shared so we needn't do any resetting
2511 for them. */
2512
2513 switch (code)
2514 {
2515 case REG:
2516 case CONST_INT:
2517 case CONST_DOUBLE:
2518 case CONST_VECTOR:
2519 case SYMBOL_REF:
2520 case CODE_LABEL:
2521 case PC:
2522 case CC0:
2523 return;

--- 53 unchanged lines hidden (view full) ---

2577 code = GET_CODE (x);
2578
2579 /* These types may be freely shared so we needn't do any resetting
2580 for them. */
2581
2582 switch (code)
2583 {
2584 case REG:
2585 case CONST_INT:
2586 case CONST_DOUBLE:
2587 case CONST_VECTOR:
2588 case SYMBOL_REF:
2589 case CODE_LABEL:
2590 case PC:
2591 case CC0:
2592 return;

--- 47 unchanged lines hidden ---

2640 case SIGN_EXTEND:
2641 case ZERO_EXTEND:
2642 other = XEXP (other, 0);
2643 break;
2644 default:
2645 goto done;
2646 }
2647 done:
2648 if ((MEM_P (other)
2649 && ! CONSTANT_P (x)
2650 && !REG_P (x)
2651 && GET_CODE (x) != SUBREG)
2652 || (REG_P (other)
2653 && (REGNO (other) < FIRST_PSEUDO_REGISTER
2654 || reg_mentioned_p (other, x))))
2655 {
2656 rtx temp = gen_reg_rtx (GET_MODE (x));
2657 emit_move_insn (temp, x);
2658 return temp;
2659 }
2660 return x;

--- 9 unchanged lines hidden ---

2670 return first_insn;
2671}
2672
2673/* Specify a new insn as the first in the chain. */
2674
2675void
2676set_first_insn (rtx insn)
2677{
2678 gcc_assert (!PREV_INSN (insn));
2679 first_insn = insn;
2680}
2681
2682/* Return the last insn emitted in current sequence or current function. */
2683
2684rtx
2685get_last_insn (void)
2686{
2687 return last_insn;
2688}
2689
2690/* Specify a new insn as the last in the chain. */
2691
2692void
2693set_last_insn (rtx insn)
2694{
2695 gcc_assert (!NEXT_INSN (insn));
2696 last_insn = insn;
2697}
2698
2699/* Return the last insn emitted, even if it is in a sequence now pushed. */
2700
2701rtx
2702get_last_insn_anywhere (void)
2703{

--- 18 unchanged lines hidden (view full) ---

2722 {
2723 if (NOTE_P (insn))
2724 for (insn = next_insn (insn);
2725 insn && NOTE_P (insn);
2726 insn = next_insn (insn))
2727 continue;
2728 else
2729 {
2730 if (NONJUMP_INSN_P (insn)
2731 && GET_CODE (PATTERN (insn)) == SEQUENCE)
2732 insn = XVECEXP (PATTERN (insn), 0, 0);
2733 }
2734 }
2735
2736 return insn;
2737}
2738

--- 9 unchanged lines hidden ---

2748 {
2749 if (NOTE_P (insn))
2750 for (insn = previous_insn (insn);
2751 insn && NOTE_P (insn);
2752 insn = previous_insn (insn))
2753 continue;
2754 else
2755 {
2756 if (NONJUMP_INSN_P (insn)
2757 && GET_CODE (PATTERN (insn)) == SEQUENCE)
2758 insn = XVECEXP (PATTERN (insn), 0,
2759 XVECLEN (PATTERN (insn), 0) - 1);
2760 }
2761 }
2762
2763 return insn;
2764}

--- 4 unchanged lines hidden (view full) ---

2769get_max_uid (void)
2770{
2771 return cur_insn_uid;
2772}
2773
2774/* Renumber instructions so that no instruction UIDs are wasted. */
2775
2776void
2777renumber_insns (void)
2778{
2779 rtx insn;
2780
2781 /* If we're not supposed to renumber instructions, don't. */
2782 if (!flag_renumber_insns)
2783 return;
2784
2785 /* If there aren't that many instructions, then it's not really
2786 worth renumbering them. */
2787 if (flag_renumber_insns == 1 && get_max_uid () < 25000)
2788 return;
2789
2790 cur_insn_uid = 1;
2791
2792 for (insn = get_insns (); insn; insn = NEXT_INSN (insn))
2793 {
2794 if (dump_file)
2795 fprintf (dump_file, "Renumbering insn %d to %d\n",
2796 INSN_UID (insn), cur_insn_uid);
2797 INSN_UID (insn) = cur_insn_uid++;
2798 }
2799}
2800
2801/* Return the next insn. If it is a SEQUENCE, return the first insn
2802 of the sequence. */
2803
2804rtx
2805next_insn (rtx insn)
2806{
2807 if (insn)
2808 {
2809 insn = NEXT_INSN (insn);
2810 if (insn && NONJUMP_INSN_P (insn)
2811 && GET_CODE (PATTERN (insn)) == SEQUENCE)
2812 insn = XVECEXP (PATTERN (insn), 0, 0);
2813 }
2814
2815 return insn;
2816}
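
/* E.g. if insn I1 is followed by an insn holding (sequence [S1 S2]),
   NEXT_INSN (I1) is the SEQUENCE-holding insn itself, while
   next_insn (I1) descends into it and returns S1.  */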
2817
2818/* Return the previous insn. If it is a SEQUENCE, return the last insn
2819 of the sequence. */
2820
2821rtx
2822previous_insn (rtx insn)
2823{
2824 if (insn)
2825 {
2826 insn = PREV_INSN (insn);
2827 if (insn && NONJUMP_INSN_P (insn)
2828 && GET_CODE (PATTERN (insn)) == SEQUENCE)
2829 insn = XVECEXP (PATTERN (insn), 0, XVECLEN (PATTERN (insn), 0) - 1);
2830 }
2831
2832 return insn;
2833}
2834
2835/* Return the next insn after INSN that is not a NOTE. This routine does not
2836 look inside SEQUENCEs. */
2837
2838rtx
2839next_nonnote_insn (rtx insn)
2840{
2841 while (insn)
2842 {
2843 insn = NEXT_INSN (insn);
2844 if (insn == 0 || !NOTE_P (insn))
2845 break;
2846 }
2847
2848 return insn;
2849}
2850
2851/* Return the previous insn before INSN that is not a NOTE. This routine does
2852 not look inside SEQUENCEs. */
2853
2854rtx
2855prev_nonnote_insn (rtx insn)
2856{
2857 while (insn)
2858 {
2859 insn = PREV_INSN (insn);
2860 if (insn == 0 || !NOTE_P (insn))
2861 break;
2862 }
2863
2864 return insn;
2865}
2866
2867/* Return the next INSN, CALL_INSN or JUMP_INSN after INSN;
2868 or 0, if there is none. This routine does not look inside
2869 SEQUENCEs. */
2870
2871rtx
2872next_real_insn (rtx insn)
2873{
2874 while (insn)
2875 {
2876 insn = NEXT_INSN (insn);
2877 if (insn == 0 || INSN_P (insn))
2878 break;
2879 }
2880
2881 return insn;
2882}
2883
2884/* Return the last INSN, CALL_INSN or JUMP_INSN before INSN;
2885 or 0, if there is none. This routine does not look inside
2886 SEQUENCEs. */
2887
2888rtx
2889prev_real_insn (rtx insn)
2890{
2891 while (insn)
2892 {
2893 insn = PREV_INSN (insn);
2894 if (insn == 0 || INSN_P (insn))
2895 break;
2896 }
2897
2898 return insn;
2899}
2900
2901/* Return the last CALL_INSN in the current list, or 0 if there is none.
2902 This routine does not look inside SEQUENCEs. */
2903
2904rtx
2905last_call_insn (void)
2906{
2907 rtx insn;
2908
2909 for (insn = get_last_insn ();
2910 insn && !CALL_P (insn);
2911 insn = PREV_INSN (insn))
2912 ;
2913
2914 return insn;
2915}
2916
2917/* Return nonzero if INSN really does something; after reload, bare USE
2918 and CLOBBER insns no longer count. Until reload has completed, this
2919 matches the insns found by next_real_insn. */
2920
2921int
2922active_insn_p (rtx insn)
2923{
2924 return (CALL_P (insn) || JUMP_P (insn)
2925 || (NONJUMP_INSN_P (insn)
2926 && (! reload_completed
2927 || (GET_CODE (PATTERN (insn)) != USE
2928 && GET_CODE (PATTERN (insn)) != CLOBBER))));
2929}
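
/* Thus, once reload has completed, (insn (use (reg:SI 0))) and
   (insn (clobber (reg:SI 1))) are no longer active, while calls,
   jumps and ordinary insns always are.  */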
2930
2931rtx
2932next_active_insn (rtx insn)
2933{

--- 27 unchanged lines hidden (view full) ---

2961/* Return the next CODE_LABEL after the insn INSN, or 0 if there is none. */
2962
2963rtx
2964next_label (rtx insn)
2965{
2966 while (insn)
2967 {
2968 insn = NEXT_INSN (insn);
2969 if (insn == 0 || LABEL_P (insn))
2970 break;
2971 }
2972
2973 return insn;
2974}
2975
2976/* Return the last CODE_LABEL before the insn INSN, or 0 if there is none. */
2977
2978rtx
2979prev_label (rtx insn)
2980{
2981 while (insn)
2982 {
2983 insn = PREV_INSN (insn);
2984 if (insn == 0 || LABEL_P (insn))
2985 break;
2986 }
2987
2988 return insn;
2989}
2990
2991/* Return the last label to mark the same position as LABEL. Return null
2992 if LABEL itself is null. */
2993
2994rtx
2995skip_consecutive_labels (rtx label)
2996{
2997 rtx insn;
2998
2999 for (insn = label; insn != 0 && !INSN_P (insn); insn = NEXT_INSN (insn))
3000 if (LABEL_P (insn))
3001 label = insn;
3002
3003 return label;
3004}
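
/* For instance, given the chain  L1: L2: (insn ...),
   skip_consecutive_labels (L1) returns L2, the last label marking
   the same position.  */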
3005
3006#ifdef HAVE_cc0
3007/* INSN uses CC0 and is being moved into a delay slot. Set up REG_CC_SETTER
3008 and REG_CC_USER notes so we can find it. */
3009
3010void
3011link_cc0_insns (rtx insn)
3012{
3013 rtx user = next_nonnote_insn (insn);
3014
3015 if (NONJUMP_INSN_P (user) && GET_CODE (PATTERN (user)) == SEQUENCE)
3016 user = XVECEXP (PATTERN (user), 0, 0);
3017
3018 REG_NOTES (user) = gen_rtx_INSN_LIST (REG_CC_SETTER, insn,
3019 REG_NOTES (user));
3020 REG_NOTES (insn) = gen_rtx_INSN_LIST (REG_CC_USER, user, REG_NOTES (insn));
3021}
3022
3023/* Return the next insn that uses CC0 after INSN, which is assumed to

--- 9 unchanged lines hidden (view full) ---

3033next_cc0_user (rtx insn)
3034{
3035 rtx note = find_reg_note (insn, REG_CC_USER, NULL_RTX);
3036
3037 if (note)
3038 return XEXP (note, 0);
3039
3040 insn = next_nonnote_insn (insn);
3041 if (insn && NONJUMP_INSN_P (insn) && GET_CODE (PATTERN (insn)) == SEQUENCE)
3042 insn = XVECEXP (PATTERN (insn), 0, 0);
3043
3044 if (insn && INSN_P (insn) && reg_mentioned_p (cc0_rtx, PATTERN (insn)))
3045 return insn;
3046
3047 return 0;
3048}
3049

--- 4 unchanged lines hidden (view full) ---

3054prev_cc0_setter (rtx insn)
3055{
3056 rtx note = find_reg_note (insn, REG_CC_SETTER, NULL_RTX);
3057
3058 if (note)
3059 return XEXP (note, 0);
3060
3061 insn = prev_nonnote_insn (insn);
3062 gcc_assert (sets_cc0_p (PATTERN (insn)));
3063
3064 return insn;
3065}
3066#endif
3067
3068/* Increment the label use counts for all labels present in X. */
3069
3070static void

--- 46 unchanged lines hidden (view full) ---

3117 probability = split_branch_probability;
3118
3119 seq = split_insns (pat, trial);
3120
3121 split_branch_probability = -1;
3122
3123 /* If we are splitting a JUMP_INSN, it might be followed by a BARRIER.
3124 We may need to handle this specially. */
3125 if (after && BARRIER_P (after))
3126 {
3127 has_barrier = 1;
3128 after = NEXT_INSN (after);
3129 }
3130
3131 if (!seq)
3132 return trial;
3133

--- 8 unchanged lines hidden (view full) ---

3142 if (!NEXT_INSN (insn_last))
3143 break;
3144 insn_last = NEXT_INSN (insn_last);
3145 }
3146
3147 /* Mark labels. */
3148 for (insn = insn_last; insn ; insn = PREV_INSN (insn))
3149 {
3150 if (JUMP_P (insn))
3151 {
3152 mark_jump_label (PATTERN (insn), insn, 0);
3153 njumps++;
3154 if (probability != -1
3155 && any_condjump_p (insn)
3156 && !find_reg_note (insn, REG_BR_PROB, 0))
3157 {
3158 /* We can preserve the REG_BR_PROB notes only if exactly
3159 one jump is created, otherwise the machine description
3160 is responsible for this step using the
3161 split_branch_probability variable. */
3162 gcc_assert (njumps == 1);
3163 REG_NOTES (insn)
3164 = gen_rtx_EXPR_LIST (REG_BR_PROB,
3165 GEN_INT (probability),
3166 REG_NOTES (insn));
3167 }
3168 }
3169 }
3170
3171 /* If we are splitting a CALL_INSN, look for the CALL_INSN
3172 in SEQ and copy our CALL_INSN_FUNCTION_USAGE to it. */
3173 if (CALL_P (trial))
3174 {
3175 for (insn = insn_last; insn ; insn = PREV_INSN (insn))
3176 if (CALL_P (insn))
3177 {
3178 rtx *p = &CALL_INSN_FUNCTION_USAGE (insn);
3179 while (*p)
3180 p = &XEXP (*p, 1);
3181 *p = CALL_INSN_FUNCTION_USAGE (trial);
3182 SIBLING_CALL_P (insn) = SIBLING_CALL_P (trial);
3183 }
3184 }
3185
3186 /* Copy notes, particularly those related to the CFG. */
3187 for (note = REG_NOTES (trial); note; note = XEXP (note, 1))
3188 {
3189 switch (REG_NOTE_KIND (note))
3190 {
3191 case REG_EH_REGION:
3192 insn = insn_last;
3193 while (insn != NULL_RTX)
3194 {
3195 if (CALL_P (insn)
3196 || (flag_non_call_exceptions && INSN_P (insn)
3197 && may_trap_p (PATTERN (insn))))
3198 REG_NOTES (insn)
3199 = gen_rtx_EXPR_LIST (REG_EH_REGION,
3200 XEXP (note, 0),
3201 REG_NOTES (insn));
3202 insn = PREV_INSN (insn);
3203 }
3204 break;
3205
3206 case REG_NORETURN:
3207 case REG_SETJMP:
3208 insn = insn_last;
3209 while (insn != NULL_RTX)
3210 {
3211 if (CALL_P (insn))
3212 REG_NOTES (insn)
3213 = gen_rtx_EXPR_LIST (REG_NOTE_KIND (note),
3214 XEXP (note, 0),
3215 REG_NOTES (insn));
3216 insn = PREV_INSN (insn);
3217 }
3218 break;
3219
3220 case REG_NON_LOCAL_GOTO:
3221 insn = insn_last;
3222 while (insn != NULL_RTX)
3223 {
3224 if (JUMP_P (insn))
3225 REG_NOTES (insn)
3226 = gen_rtx_EXPR_LIST (REG_NOTE_KIND (note),
3227 XEXP (note, 0),
3228 REG_NOTES (insn));
3229 insn = PREV_INSN (insn);
3230 }
3231 break;
3232
3233 default:
3234 break;
3235 }
3236 }
3237
3238  /* If there are LABELS inside the split insns, increment the
3239     usage count so we don't delete the label.  */
3240 if (NONJUMP_INSN_P (trial))
3241 {
3242 insn = insn_last;
3243 while (insn != NULL_RTX)
3244 {
3245 if (NONJUMP_INSN_P (insn))
3246 mark_label_nuses (PATTERN (insn));
3247
3248 insn = PREV_INSN (insn);
3249 }
3250 }
3251
3252 tem = emit_insn_after_setloc (seq, trial, INSN_LOCATOR (trial));
3253

--- 37 unchanged lines hidden (view full) ---

3291
3292#ifdef ENABLE_RTL_CHECKING
3293 if (insn
3294 && INSN_P (insn)
3295 && (returnjump_p (insn)
3296 || (GET_CODE (insn) == SET
3297 && SET_DEST (insn) == pc_rtx)))
3298 {
3299 warning (0, "ICE: emit_insn used where emit_jump_insn needed:\n");
3300 debug_rtx (insn);
3301 }
3302#endif
3303
3304 return insn;
3305}
3306
3307/* Like `make_insn_raw' but make a JUMP_INSN instead of an insn. */
3308
3309rtx
3310make_jump_insn_raw (rtx pattern)
3311{
3312 rtx insn;
3313
3314 insn = rtx_alloc (JUMP_INSN);
3315 INSN_UID (insn) = cur_insn_uid++;
3316
3317 PATTERN (insn) = pattern;

--- 52 unchanged lines hidden (view full) ---

3370 SEQUENCE. */
3371
3372void
3373add_insn_after (rtx insn, rtx after)
3374{
3375 rtx next = NEXT_INSN (after);
3376 basic_block bb;
3377
3378 gcc_assert (!optimize || !INSN_DELETED_P (after));
3379
3380 NEXT_INSN (insn) = next;
3381 PREV_INSN (insn) = after;
3382
3383 if (next)
3384 {
3385 PREV_INSN (next) = insn;
3386 if (NONJUMP_INSN_P (next) && GET_CODE (PATTERN (next)) == SEQUENCE)
3387 PREV_INSN (XVECEXP (PATTERN (next), 0, 0)) = insn;
3388 }
3389 else if (last_insn == after)
3390 last_insn = insn;
3391 else
3392 {
3393 struct sequence_stack *stack = seq_stack;
3394 /* Scan all pending sequences too. */
3395 for (; stack; stack = stack->next)
3396 if (after == stack->last)
3397 {
3398 stack->last = insn;
3399 break;
3400 }
3401
3402 gcc_assert (stack);
3403 }
3404
3405 if (!BARRIER_P (after)
3406 && !BARRIER_P (insn)
3407 && (bb = BLOCK_FOR_INSN (after)))
3408 {
3409 set_block_for_insn (insn, bb);
3410 if (INSN_P (insn))
3411 bb->flags |= BB_DIRTY;
3412      /* This should not happen, as the first insn in the BB is
3413	 always either a NOTE or a LABEL.  */
3414 if (BB_END (bb) == after
3415 /* Avoid clobbering of structure when creating new BB. */
3416 && !BARRIER_P (insn)
3417 && (!NOTE_P (insn)
3418 || NOTE_LINE_NUMBER (insn) != NOTE_INSN_BASIC_BLOCK))
3419 BB_END (bb) = insn;
3420 }
3421
3422 NEXT_INSN (after) = insn;
3423 if (NONJUMP_INSN_P (after) && GET_CODE (PATTERN (after)) == SEQUENCE)
3424 {
3425 rtx sequence = PATTERN (after);
3426 NEXT_INSN (XVECEXP (sequence, 0, XVECLEN (sequence, 0) - 1)) = insn;
3427 }
3428}
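/* Stripped of SEQUENCE handling, sequence-stack scanning and basic-block
   bookkeeping, the list splice above amounts to (a sketch; INSN and AFTER
   are ordinary insns in the main chain):

	NEXT_INSN (insn) = NEXT_INSN (after);
	PREV_INSN (insn) = after;
	if (NEXT_INSN (after))
	  PREV_INSN (NEXT_INSN (after)) = insn;
	else
	  last_insn = insn;
	NEXT_INSN (after) = insn;  */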
3429
3430/* Add INSN into the doubly-linked list before insn BEFORE.  This
3431   function and the previous one should be the only ones called to
3432   insert an insn once delay slots have been filled, since only they
3433   know how to update a SEQUENCE.  */
3434
3435void
3436add_insn_before (rtx insn, rtx before)
3437{
3438 rtx prev = PREV_INSN (before);
3439 basic_block bb;
3440
3441 gcc_assert (!optimize || !INSN_DELETED_P (before));
3442
3443 PREV_INSN (insn) = prev;
3444 NEXT_INSN (insn) = before;
3445
3446 if (prev)
3447 {
3448 NEXT_INSN (prev) = insn;
3449 if (NONJUMP_INSN_P (prev) && GET_CODE (PATTERN (prev)) == SEQUENCE)
3450 {
3451 rtx sequence = PATTERN (prev);
3452 NEXT_INSN (XVECEXP (sequence, 0, XVECLEN (sequence, 0) - 1)) = insn;
3453 }
3454 }
3455 else if (first_insn == before)
3456 first_insn = insn;
3457 else
3458 {
3459 struct sequence_stack *stack = seq_stack;
3460 /* Scan all pending sequences too. */
3461 for (; stack; stack = stack->next)
3462 if (before == stack->first)
3463 {
3464 stack->first = insn;
3465 break;
3466 }
3467
3468 gcc_assert (stack);
3469 }
3470
3471 if (!BARRIER_P (before)
3472 && !BARRIER_P (insn)
3473 && (bb = BLOCK_FOR_INSN (before)))
3474 {
3475 set_block_for_insn (insn, bb);
3476 if (INSN_P (insn))
3477 bb->flags |= BB_DIRTY;
3478      /* This should not happen, as the first insn in the BB is always
3479	 either a NOTE or a LABEL.  */
3480 gcc_assert (BB_HEAD (bb) != insn
3481 /* Avoid clobbering of structure when creating new BB. */
3482 || BARRIER_P (insn)
3483 || (NOTE_P (insn)
3484 && NOTE_LINE_NUMBER (insn) == NOTE_INSN_BASIC_BLOCK));
3485 }
3486
3487 PREV_INSN (before) = insn;
3488 if (NONJUMP_INSN_P (before) && GET_CODE (PATTERN (before)) == SEQUENCE)
3489 PREV_INSN (XVECEXP (PATTERN (before), 0, 0)) = insn;
3490}
3491
3492/* Remove an insn from its doubly-linked list. This function knows how
3493 to handle sequences. */
3494void
3495remove_insn (rtx insn)
3496{
3497 rtx next = NEXT_INSN (insn);
3498 rtx prev = PREV_INSN (insn);
3499 basic_block bb;
3500
3501 if (prev)
3502 {
3503 NEXT_INSN (prev) = next;
3504 if (NONJUMP_INSN_P (prev) && GET_CODE (PATTERN (prev)) == SEQUENCE)
3505 {
3506 rtx sequence = PATTERN (prev);
3507 NEXT_INSN (XVECEXP (sequence, 0, XVECLEN (sequence, 0) - 1)) = next;
3508 }
3509 }
3510 else if (first_insn == insn)
3511 first_insn = next;
3512 else
3513 {
3514 struct sequence_stack *stack = seq_stack;
3515 /* Scan all pending sequences too. */
3516 for (; stack; stack = stack->next)
3517 if (insn == stack->first)
3518 {
3519 stack->first = next;
3520 break;
3521 }
3522
3523 gcc_assert (stack);
3524 }
3525
3526 if (next)
3527 {
3528 PREV_INSN (next) = prev;
3529 if (NONJUMP_INSN_P (next) && GET_CODE (PATTERN (next)) == SEQUENCE)
3530 PREV_INSN (XVECEXP (PATTERN (next), 0, 0)) = prev;
3531 }
3532 else if (last_insn == insn)
3533 last_insn = prev;
3534 else
3535 {
3536 struct sequence_stack *stack = seq_stack;
3537 /* Scan all pending sequences too. */
3538 for (; stack; stack = stack->next)
3539 if (insn == stack->last)
3540 {
3541 stack->last = prev;
3542 break;
3543 }
3544
3545 gcc_assert (stack);
3546 }
3547 if (!BARRIER_P (insn)
3548 && (bb = BLOCK_FOR_INSN (insn)))
3549 {
3550 if (INSN_P (insn))
3551 bb->flags |= BB_DIRTY;
3552 if (BB_HEAD (bb) == insn)
3553 {
3554	  /* Never delete the basic block note without deleting the
3555	     whole basic block.  */
3556 gcc_assert (!NOTE_P (insn));
3557 BB_HEAD (bb) = next;
3558 }
3559 if (BB_END (bb) == insn)
3560 BB_END (bb) = prev;
3561 }
3562}
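/* In the common case -- INSN in the middle of the main chain with no
   SEQUENCE on either side -- remove_insn reduces to the usual
   doubly-linked unlink (a sketch):

	NEXT_INSN (PREV_INSN (insn)) = NEXT_INSN (insn);
	PREV_INSN (NEXT_INSN (insn)) = PREV_INSN (insn);

   INSN's own NEXT_INSN and PREV_INSN fields are left untouched.  */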
3563
3564/* Append CALL_FUSAGE to the CALL_INSN_FUNCTION_USAGE for CALL_INSN. */
3565
3566void
3567add_function_usage_to (rtx call_insn, rtx call_fusage)
3568{
3569 gcc_assert (call_insn && CALL_P (call_insn));
3570
3571 /* Put the register usage information on the CALL. If there is already
3572 some usage information, put ours at the end. */
3573 if (CALL_INSN_FUNCTION_USAGE (call_insn))
3574 {
3575 rtx link;
3576
3577 for (link = CALL_INSN_FUNCTION_USAGE (call_insn); XEXP (link, 1) != 0;

--- 57 unchanged lines hidden (view full) ---

3635void
3636reorder_insns (rtx from, rtx to, rtx after)
3637{
3638 rtx prev = PREV_INSN (from);
3639 basic_block bb, bb2;
3640
3641 reorder_insns_nobb (from, to, after);
3642
3643 if (!BARRIER_P (after)
3644 && (bb = BLOCK_FOR_INSN (after)))
3645 {
3646 rtx x;
3647 bb->flags |= BB_DIRTY;
3648
3649 if (!BARRIER_P (from)
3650 && (bb2 = BLOCK_FOR_INSN (from)))
3651 {
3652 if (BB_END (bb2) == to)
3653 BB_END (bb2) = prev;
3654 bb2->flags |= BB_DIRTY;
3655 }
3656
3657 if (BB_END (bb) == after)
3658 BB_END (bb) = to;
3659
3660 for (x = from; x != NEXT_INSN (to); x = NEXT_INSN (x))
3661 if (!BARRIER_P (x))
3662 set_block_for_insn (x, bb);
3663 }
3664}
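/* For example (illustrative): reorder_insns (insn, insn, after) moves the
   single instruction INSN so that it immediately follows AFTER, updating
   BB_END and the moved range's BLOCK_FOR_INSN as above.  */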
3665
3666/* Return the line note insn preceding INSN. */
3667
3668static rtx
3669find_line_note (rtx insn)
3670{
3671 if (no_line_numbers)
3672 return 0;
3673
3674 for (; insn; insn = PREV_INSN (insn))
3675 if (NOTE_P (insn)
3676 && NOTE_LINE_NUMBER (insn) >= 0)
3677 break;
3678
3679 return insn;
3680}
3681
3682
3683/* Emit insn(s) of given code and pattern
3684 at a specified place within the doubly-linked list.
3685
3686 All of the emit_foo global entry points accept an object
3687 X which is either an insn list or a PATTERN of a single
3688 instruction.
3689

--- 18 unchanged lines hidden (view full) ---

3708/* Make X be output before the instruction BEFORE. */
3709
3710rtx
3711emit_insn_before_noloc (rtx x, rtx before)
3712{
3713 rtx last = before;
3714 rtx insn;
3715
3716 gcc_assert (before);
3717
3718 if (x == NULL_RTX)
3719 return last;
3720
3721 switch (GET_CODE (x))
3722 {
3723 case INSN:
3724 case JUMP_INSN:

--- 8 unchanged lines hidden (view full) ---

3733 add_insn_before (insn, before);
3734 last = insn;
3735 insn = next;
3736 }
3737 break;
3738
3739#ifdef ENABLE_RTL_CHECKING
3740 case SEQUENCE:
3741 gcc_unreachable ();
3742 break;
3743#endif
3744
3745 default:
3746 last = make_insn_raw (x);
3747 add_insn_before (last, before);
3748 break;
3749 }

--- 4 unchanged lines hidden (view full) ---

3754/* Make an instruction with body X and code JUMP_INSN
3755 and output it before the instruction BEFORE. */
3756
3757rtx
3758emit_jump_insn_before_noloc (rtx x, rtx before)
3759{
3760 rtx insn, last = NULL_RTX;
3761
3762 gcc_assert (before);
3763
3764 switch (GET_CODE (x))
3765 {
3766 case INSN:
3767 case JUMP_INSN:
3768 case CALL_INSN:
3769 case CODE_LABEL:
3770 case BARRIER:

--- 5 unchanged lines hidden (view full) ---

3776 add_insn_before (insn, before);
3777 last = insn;
3778 insn = next;
3779 }
3780 break;
3781
3782#ifdef ENABLE_RTL_CHECKING
3783 case SEQUENCE:
3784 gcc_unreachable ();
3785 break;
3786#endif
3787
3788 default:
3789 last = make_jump_insn_raw (x);
3790 add_insn_before (last, before);
3791 break;
3792 }

--- 4 unchanged lines hidden (view full) ---

3797/* Make an instruction with body X and code CALL_INSN
3798 and output it before the instruction BEFORE. */
3799
3800rtx
3801emit_call_insn_before_noloc (rtx x, rtx before)
3802{
3803 rtx last = NULL_RTX, insn;
3804
3805 gcc_assert (before);
3806
3807 switch (GET_CODE (x))
3808 {
3809 case INSN:
3810 case JUMP_INSN:
3811 case CALL_INSN:
3812 case CODE_LABEL:
3813 case BARRIER:

--- 5 unchanged lines hidden (view full) ---

3819 add_insn_before (insn, before);
3820 last = insn;
3821 insn = next;
3822 }
3823 break;
3824
3825#ifdef ENABLE_RTL_CHECKING
3826 case SEQUENCE:
3827 gcc_unreachable ();
3828 break;
3829#endif
3830
3831 default:
3832 last = make_call_insn_raw (x);
3833 add_insn_before (last, before);
3834 break;
3835 }

--- 33 unchanged lines hidden (view full) ---

3869
3870/* Emit a note of subtype SUBTYPE before the insn BEFORE. */
3871
3872rtx
3873emit_note_before (int subtype, rtx before)
3874{
3875 rtx note = rtx_alloc (NOTE);
3876 INSN_UID (note) = cur_insn_uid++;
3877#ifndef USE_MAPPED_LOCATION
3878 NOTE_SOURCE_FILE (note) = 0;
3879#endif
3880 NOTE_LINE_NUMBER (note) = subtype;
3881 BLOCK_FOR_INSN (note) = NULL;
3882
3883 add_insn_before (note, before);
3884 return note;
3885}
3886
3887/* Helper for emit_insn_after; handles lists of instructions
3888   efficiently.  */
3889
3890static rtx emit_insn_after_1 (rtx, rtx);
3891
3892static rtx
3893emit_insn_after_1 (rtx first, rtx after)
3894{
3895 rtx last;
3896 rtx after_after;
3897 basic_block bb;
3898
3899 if (!BARRIER_P (after)
3900 && (bb = BLOCK_FOR_INSN (after)))
3901 {
3902 bb->flags |= BB_DIRTY;
3903 for (last = first; NEXT_INSN (last); last = NEXT_INSN (last))
3904 if (!BARRIER_P (last))
3905 set_block_for_insn (last, bb);
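      /* The loop above stops at the last insn of the list, which has not
	 yet had its block set; handle it here (this is not an accidental
	 duplicate of the loop body).  */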
3906 if (!BARRIER_P (last))
3907 set_block_for_insn (last, bb);
3908 if (BB_END (bb) == after)
3909 BB_END (bb) = last;
3910 }
3911 else
3912 for (last = first; NEXT_INSN (last); last = NEXT_INSN (last))
3913 continue;
3914

--- 12 unchanged lines hidden (view full) ---

3927
3928/* Make X be output after the insn AFTER. */
3929
3930rtx
3931emit_insn_after_noloc (rtx x, rtx after)
3932{
3933 rtx last = after;
3934
3935 gcc_assert (after);
3936
3937 if (x == NULL_RTX)
3938 return last;
3939
3940 switch (GET_CODE (x))
3941 {
3942 case INSN:
3943 case JUMP_INSN:
3944 case CALL_INSN:
3945 case CODE_LABEL:
3946 case BARRIER:
3947 case NOTE:
3948 last = emit_insn_after_1 (x, after);
3949 break;
3950
3951#ifdef ENABLE_RTL_CHECKING
3952 case SEQUENCE:
3953 gcc_unreachable ();
3954 break;
3955#endif
3956
3957 default:
3958 last = make_insn_raw (x);
3959 add_insn_after (last, after);
3960 break;
3961 }

--- 21 unchanged lines hidden (view full) ---

3983/* Make an insn of code JUMP_INSN with body X
3984 and output it after the insn AFTER. */
3985
3986rtx
3987emit_jump_insn_after_noloc (rtx x, rtx after)
3988{
3989 rtx last;
3990
3991 gcc_assert (after);
3992
3993 switch (GET_CODE (x))
3994 {
3995 case INSN:
3996 case JUMP_INSN:
3997 case CALL_INSN:
3998 case CODE_LABEL:
3999 case BARRIER:
4000 case NOTE:
4001 last = emit_insn_after_1 (x, after);
4002 break;
4003
4004#ifdef ENABLE_RTL_CHECKING
4005 case SEQUENCE:
4006 gcc_unreachable ();
4007 break;
4008#endif
4009
4010 default:
4011 last = make_jump_insn_raw (x);
4012 add_insn_after (last, after);
4013 break;
4014 }

--- 4 unchanged lines hidden (view full) ---

4019/* Make an instruction with body X and code CALL_INSN
4020 and output it after the instruction AFTER. */
4021
4022rtx
4023emit_call_insn_after_noloc (rtx x, rtx after)
4024{
4025 rtx last;
4026
4027 gcc_assert (after);
4028
4029 switch (GET_CODE (x))
4030 {
4031 case INSN:
4032 case JUMP_INSN:
4033 case CALL_INSN:
4034 case CODE_LABEL:
4035 case BARRIER:
4036 case NOTE:
4037 last = emit_insn_after_1 (x, after);
4038 break;
4039
4040#ifdef ENABLE_RTL_CHECKING
4041 case SEQUENCE:
4042 gcc_unreachable ();
4043 break;
4044#endif
4045
4046 default:
4047 last = make_call_insn_raw (x);
4048 add_insn_after (last, after);
4049 break;
4050 }

--- 34 unchanged lines hidden (view full) ---

4085
4086/* Emit a note of subtype SUBTYPE after the insn AFTER. */
4087
4088rtx
4089emit_note_after (int subtype, rtx after)
4090{
4091 rtx note = rtx_alloc (NOTE);
4092 INSN_UID (note) = cur_insn_uid++;
4093#ifndef USE_MAPPED_LOCATION
4094 NOTE_SOURCE_FILE (note) = 0;
4095#endif
4096 NOTE_LINE_NUMBER (note) = subtype;
4097 BLOCK_FOR_INSN (note) = NULL;
4098 add_insn_after (note, after);
4099 return note;
4100}
4101
4102/* Emit a copy of note ORIG after the insn AFTER. */
4103

--- 236 unchanged lines hidden (view full) ---

4340 add_insn (insn);
4341 last = insn;
4342 insn = next;
4343 }
4344 break;
4345
4346#ifdef ENABLE_RTL_CHECKING
4347 case SEQUENCE:
4348 gcc_unreachable ();
4349 break;
4350#endif
4351
4352 default:
4353 last = make_insn_raw (x);
4354 add_insn (last);
4355 break;
4356 }

--- 24 unchanged lines hidden (view full) ---

4381 add_insn (insn);
4382 last = insn;
4383 insn = next;
4384 }
4385 break;
4386
4387#ifdef ENABLE_RTL_CHECKING
4388 case SEQUENCE:
4389 gcc_unreachable ();
4390 break;
4391#endif
4392
4393 default:
4394 last = make_jump_insn_raw (x);
4395 add_insn (last);
4396 break;
4397 }

--- 17 unchanged lines hidden (view full) ---

4415 case CODE_LABEL:
4416 case BARRIER:
4417 case NOTE:
4418 insn = emit_insn (x);
4419 break;
4420
4421#ifdef ENABLE_RTL_CHECKING
4422 case SEQUENCE:
4423 gcc_unreachable ();
4424 break;
4425#endif
4426
4427 default:
4428 insn = make_call_insn_raw (x);
4429 add_insn (insn);
4430 break;
4431 }

--- 33 unchanged lines hidden (view full) ---

4465 of the doubly-linked list, but only if line-numbers are desired for
4466 debugging info and it doesn't match the previous one. */
4467
4468rtx
4469emit_line_note (location_t location)
4470{
4471 rtx note;
4472
4473#ifdef USE_MAPPED_LOCATION
4474 if (location == last_location)
4475 return NULL_RTX;
4476#else
4477 if (location.file && last_location.file
4478 && !strcmp (location.file, last_location.file)
4479 && location.line == last_location.line)
4480 return NULL_RTX;
4481#endif
4482 last_location = location;
4483
4484 if (no_line_numbers)
4485 {
4486 cur_insn_uid++;
4487 return NULL_RTX;
4488 }
4489
4490#ifdef USE_MAPPED_LOCATION
4491 note = emit_note ((int) location);
4492#else
4493 note = emit_note (location.line);
4494 NOTE_SOURCE_FILE (note) = location.file;
4495#endif
4496
4497 return note;
4498}
4499
4500/* Emit a copy of note ORIG. */
4501
4502rtx
4503emit_note_copy (rtx orig)

--- 35 unchanged lines hidden (view full) ---

4539}
4540
4541/* Cause next statement to emit a line note even if the line number
4542 has not changed. */
4543
4544void
4545force_next_line_note (void)
4546{
4547#ifdef USE_MAPPED_LOCATION
4548 last_location = -1;
4549#else
4550 last_location.line = -1;
4551#endif
4552}
4553
4554/* Place a note of KIND on insn INSN with DATUM as the datum. If a
4555 note of this type already exists, remove it first. */
4556
4557rtx
4558set_unique_reg_note (rtx insn, enum reg_note kind, rtx datum)
4559{

--- 4 unchanged lines hidden (view full) ---

4564 case REG_EQUAL:
4565 case REG_EQUIV:
4566 /* Don't add REG_EQUAL/REG_EQUIV notes if the insn
4567 has multiple sets (some callers assume single_set
4568 means the insn only has one set, when in fact it
4569	 means the insn only has one *useful* set).  */
4570 if (GET_CODE (PATTERN (insn)) == PARALLEL && multiple_sets (insn))
4571 {
4572 gcc_assert (!note);
4573 return NULL_RTX;
4574 }
4575
4576 /* Don't add ASM_OPERAND REG_EQUAL/REG_EQUIV notes.
4577 It serves no useful purpose and breaks eliminate_regs. */
4578 if (GET_CODE (datum) == ASM_OPERANDS)
4579 return NULL_RTX;
4580 break;

--- 10 unchanged lines hidden (view full) ---

4591
4592 REG_NOTES (insn) = gen_rtx_EXPR_LIST (kind, datum, REG_NOTES (insn));
4593 return REG_NOTES (insn);
4594}
4595
4596/* Return an indication of which type of insn should have X as a body.
4597 The value is CODE_LABEL, INSN, CALL_INSN or JUMP_INSN. */
4598
4599static enum rtx_code
4600classify_insn (rtx x)
4601{
4602 if (LABEL_P (x))
4603 return CODE_LABEL;
4604 if (GET_CODE (x) == CALL)
4605 return CALL_INSN;
4606 if (GET_CODE (x) == RETURN)
4607 return JUMP_INSN;
4608 if (GET_CODE (x) == SET)
4609 {
4610 if (SET_DEST (x) == pc_rtx)

--- 22 unchanged lines hidden (view full) ---

4633/* Emit the rtl pattern X as an appropriate kind of insn.
4634 If X is a label, it is simply added into the insn chain. */
4635
4636rtx
4637emit (rtx x)
4638{
4639 enum rtx_code code = classify_insn (x);
4640
4641 switch (code)
4642 {
4643 case CODE_LABEL:
4644 return emit_label (x);
4645 case INSN:
4646 return emit_insn (x);
4647 case JUMP_INSN:
4648 {
4649 rtx insn = emit_jump_insn (x);
4650 if (any_uncondjump_p (insn) || GET_CODE (x) == RETURN)
4651 return emit_barrier ();
4652 return insn;
4653 }
4654 case CALL_INSN:
4655 return emit_call_insn (x);
4656 default:
4657 gcc_unreachable ();
4658 }
4659}
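/* For example (a sketch; REG is assumed to be some existing pseudo):

	emit (gen_rtx_SET (VOIDmode, reg, GEN_INT (0)));

   classify_insn sees a SET whose SET_DEST is not pc_rtx, so the pattern
   is added to the chain as an ordinary INSN.  */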
4660
4661/* Space for free sequence stack entries. */
4662static GTY ((deletable)) struct sequence_stack *free_sequence_stack;
4663
4664/* Begin emitting insns to a sequence. If this sequence will contain
4665 something that might cause the compiler to pop arguments to function
4666 calls (because those pops have previously been deferred; see
4667 INHIBIT_DEFER_POP for more details), use do_pending_stack_adjust
4668 before calling this function. That will ensure that the deferred
4669 pops are not accidentally emitted in the middle of this sequence. */
4670
4671void
4672start_sequence (void)
4673{
4674 struct sequence_stack *tem;
4675
4676 if (free_sequence_stack != NULL)
4677 {
4678 tem = free_sequence_stack;
4679 free_sequence_stack = tem->next;
4680 }
4681 else
4682 tem = ggc_alloc (sizeof (struct sequence_stack));
4683
4684 tem->next = seq_stack;
4685 tem->first = first_insn;
4686 tem->last = last_insn;
4687
4688 seq_stack = tem;
4689
4690 first_insn = 0;
4691 last_insn = 0;
4692}
4693
4694/* Set up the insn chain starting with FIRST as the current sequence,
4695 saving the previously current one. See the documentation for
4696 start_sequence for more information about how to use this function. */
4697
4698void
4699push_to_sequence (rtx first)
4700{
4701 rtx last;
4702
4703 start_sequence ();
4704
4705 for (last = first; last && NEXT_INSN (last); last = NEXT_INSN (last));
4706
4707 first_insn = first;
4708 last_insn = last;
4709}
4710
4711/* Set up the outer-level insn chain
4712 as the current sequence, saving the previously current one. */
4713
4714void
4715push_topmost_sequence (void)
4716{
4717 struct sequence_stack *stack, *top = NULL;
4718
4719 start_sequence ();
4720
4721 for (stack = seq_stack; stack; stack = stack->next)
4722 top = stack;
4723
4724 first_insn = top->first;
4725 last_insn = top->last;
4726}
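/* For example (a sketch; PAT is some pattern to be emitted): to add an
   insn at the end of the function's main chain while sequences are in
   progress:

	push_topmost_sequence ();
	emit_insn (pat);
	pop_topmost_sequence ();  */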
4727
4728/* After emitting to the outer-level insn chain, record the updated
4729   chain in the topmost stack entry and restore the previous saved state.  */
4730
4731void
4732pop_topmost_sequence (void)
4733{
4734 struct sequence_stack *stack, *top = NULL;
4735
4736 for (stack = seq_stack; stack; stack = stack->next)
4737 top = stack;
4738
4739 top->first = first_insn;
4740 top->last = last_insn;
4741
4742 end_sequence ();
4743}
4744
4745/* After emitting to a sequence, restore previous saved state.
4746
4747 To get the contents of the sequence just made, you must call
4748 `get_insns' *before* calling here.

--- 8 unchanged lines hidden (view full) ---

4757
4758void
4759end_sequence (void)
4760{
4761 struct sequence_stack *tem = seq_stack;
4762
4763 first_insn = tem->first;
4764 last_insn = tem->last;
4765 seq_stack = tem->next;
4766
4767 memset (tem, 0, sizeof (*tem));
4768 tem->next = free_sequence_stack;
4769 free_sequence_stack = tem;
4770}
4771
4772/* Return 1 if currently emitting into a sequence. */
4773
4774int
4775in_sequence_p (void)
4776{
4777 return seq_stack != 0;
4778}
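/* The typical way to use the sequence machinery (a sketch; DEST, SRC and
   INSN are assumed to exist in the caller):

	rtx seq;

	start_sequence ();
	emit_move_insn (dest, src);
	seq = get_insns ();
	end_sequence ();
	emit_insn_before (seq, insn);  */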
4779
4780/* Put the various virtual registers into REGNO_REG_RTX. */
4781
4782static void
4783init_virtual_regs (struct emit_status *es)
4784{
4785 rtx *ptr = es->x_regno_reg_rtx;
4786 ptr[VIRTUAL_INCOMING_ARGS_REGNUM] = virtual_incoming_args_rtx;
4787 ptr[VIRTUAL_STACK_VARS_REGNUM] = virtual_stack_vars_rtx;
4788 ptr[VIRTUAL_STACK_DYNAMIC_REGNUM] = virtual_stack_dynamic_rtx;
4789 ptr[VIRTUAL_OUTGOING_ARGS_REGNUM] = virtual_outgoing_args_rtx;
4790 ptr[VIRTUAL_CFA_REGNUM] = virtual_cfa_rtx;

--- 35 unchanged lines hidden (view full) ---

4826 RTX_CODE code;
4827 const char *format_ptr;
4828
4829 code = GET_CODE (orig);
4830
4831 switch (code)
4832 {
4833 case REG:
4834 case CONST_INT:
4835 case CONST_DOUBLE:
4836 case CONST_VECTOR:
4837 case SYMBOL_REF:
4838 case CODE_LABEL:
4839 case PC:
4840 case CC0:
4841 return orig;
4842 case CLOBBER:
4843 if (REG_P (XEXP (orig, 0)) && REGNO (XEXP (orig, 0)) < FIRST_PSEUDO_REGISTER)
4844 return orig;
4845 break;
4846
4847 case SCRATCH:
4848 for (i = 0; i < copy_insn_n_scratches; i++)
4849 if (copy_insn_scratch_in[i] == orig)
4850 return copy_insn_scratch_out[i];
4851 break;
4852
4853 case CONST:

--- 9 unchanged lines hidden (view full) ---

4863 the constant address may need to be reloaded. If the mem is shared,
4864 then reloading one copy of this mem will cause all copies to appear
4865 to have been reloaded. */
4866
4867 default:
4868 break;
4869 }
4870
4871 /* Copy the various flags, fields, and other information. We assume
4872 that all fields need copying, and then clear the fields that should
4873 not be copied. That is the sensible default behavior, and forces
4874 us to explicitly document why we are *not* copying a flag. */
4875 copy = shallow_copy_rtx (orig);
4876
4877 /* We do not copy the USED flag, which is used as a mark bit during
4878 walks over the RTL. */
4879 RTX_FLAG (copy, used) = 0;
4880
4881 /* We do not copy JUMP, CALL, or FRAME_RELATED for INSNs. */
4882 if (INSN_P (orig))
4883 {
4884 RTX_FLAG (copy, jump) = 0;
4885 RTX_FLAG (copy, call) = 0;
4886 RTX_FLAG (copy, frame_related) = 0;
4887 }
4888
4889 format_ptr = GET_RTX_FORMAT (GET_CODE (copy));
4890
4891 for (i = 0; i < GET_RTX_LENGTH (GET_CODE (copy)); i++)
4892 switch (*format_ptr++)
4893 {
4894 case 'e':
4895 if (XEXP (orig, i) != NULL)
4896 XEXP (copy, i) = copy_insn_1 (XEXP (orig, i));
4897 break;
4898
4899 case 'E':
4900 case 'V':
4901 if (XVEC (orig, i) == orig_asm_constraints_vector)
4902 XVEC (copy, i) = copy_asm_constraints_vector;
4903 else if (XVEC (orig, i) == orig_asm_operands_vector)
4904 XVEC (copy, i) = copy_asm_operands_vector;
4905 else if (XVEC (orig, i) != NULL)
4906 {
4907 XVEC (copy, i) = rtvec_alloc (XVECLEN (orig, i));
4908 for (j = 0; j < XVECLEN (copy, i); j++)
4909 XVECEXP (copy, i, j) = copy_insn_1 (XVECEXP (orig, i, j));
4910 }
4911 break;
4912
4913 case 't':
4914 case 'w':
4915 case 'i':
4916 case 's':
4917 case 'S':
4918 case 'u':
4919 case '0':
4920 /* These are left unchanged. */
4921 break;
4922
4923 default:
4924 gcc_unreachable ();
4925 }
4926
4927 if (code == SCRATCH)
4928 {
4929 i = copy_insn_n_scratches++;
4930 gcc_assert (i < MAX_RECOG_OPERANDS);
4931 copy_insn_scratch_in[i] = orig;
4932 copy_insn_scratch_out[i] = copy;
4933 }
4934 else if (code == ASM_OPERANDS)
4935 {
4936 orig_asm_operands_vector = ASM_OPERANDS_INPUT_VEC (orig);
4937 copy_asm_operands_vector = ASM_OPERANDS_INPUT_VEC (copy);
4938 orig_asm_constraints_vector = ASM_OPERANDS_INPUT_CONSTRAINT_VEC (orig);

--- 25 unchanged lines hidden (view full) ---

4964void
4965init_emit (void)
4966{
4967 struct function *f = cfun;
4968
4969 f->emit = ggc_alloc (sizeof (struct emit_status));
4970 first_insn = NULL;
4971 last_insn = NULL;
4972 cur_insn_uid = 1;
4973 reg_rtx_no = LAST_VIRTUAL_REGISTER + 1;
4974 last_location = UNKNOWN_LOCATION;
4975 first_label_num = label_num;
4976 seq_stack = NULL;
4977
4978 /* Init the tables that describe all the pseudo regs. */
4979
4980 f->emit->regno_pointer_align_length = LAST_VIRTUAL_REGISTER + 101;
4981
4982 f->emit->regno_pointer_align
4983 = ggc_alloc_cleared (f->emit->regno_pointer_align_length

--- 36 unchanged lines hidden (view full) ---

5020 REGNO_POINTER_ALIGN (VIRTUAL_CFA_REGNUM) = BITS_PER_WORD;
5021#endif
5022
5023#ifdef INIT_EXPANDERS
5024 INIT_EXPANDERS;
5025#endif
5026}
5027
5028/* Generate a vector constant for mode MODE and constant value CONSTANT. */
5029
5030static rtx
5031gen_const_vector (enum machine_mode mode, int constant)
5032{
5033 rtx tem;
5034 rtvec v;
5035 int units, i;
5036 enum machine_mode inner;
5037
5038 units = GET_MODE_NUNITS (mode);
5039 inner = GET_MODE_INNER (mode);
5040
5041 gcc_assert (!DECIMAL_FLOAT_MODE_P (inner));
5042
5043 v = rtvec_alloc (units);
5044
5045 /* We need to call this function after we set the scalar const_tiny_rtx
5046 entries. */
5047 gcc_assert (const_tiny_rtx[constant][(int) inner]);
5048
5049 for (i = 0; i < units; ++i)
5050 RTVEC_ELT (v, i) = const_tiny_rtx[constant][(int) inner];
5051
5052 tem = gen_rtx_raw_CONST_VECTOR (mode, v);
5053 return tem;
5054}
5055
5056/* Generate a vector like gen_rtx_raw_CONST_VECTOR, but use the zero vector
5057   when all elements are zero, and the one vector when all elements are one.  */
5058rtx
5059gen_rtx_CONST_VECTOR (enum machine_mode mode, rtvec v)
5060{
5061 enum machine_mode inner = GET_MODE_INNER (mode);
5062 int nunits = GET_MODE_NUNITS (mode);
5063 rtx x;
5064 int i;
5065
5066 /* Check to see if all of the elements have the same value. */
5067 x = RTVEC_ELT (v, nunits - 1);
5068 for (i = nunits - 2; i >= 0; i--)
5069 if (RTVEC_ELT (v, i) != x)
5070 break;
5071
5072 /* If the values are all the same, check to see if we can use one of the
5073 standard constant vectors. */
5074 if (i == -1)
5075 {
5076 if (x == CONST0_RTX (inner))
5077 return CONST0_RTX (mode);
5078 else if (x == CONST1_RTX (inner))
5079 return CONST1_RTX (mode);
5080 }
5081
5082 return gen_rtx_raw_CONST_VECTOR (mode, v);
5083}
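/* For example (a sketch, assuming the target provides V4SImode): a vector
   whose elements are all const0_rtx collapses to the shared zero vector
   rather than a fresh CONST_VECTOR:

	rtvec v = rtvec_alloc (4);
	int i;

	for (i = 0; i < 4; i++)
	  RTVEC_ELT (v, i) = const0_rtx;
	x = gen_rtx_CONST_VECTOR (V4SImode, v);

   Here X ends up pointer-identical to CONST0_RTX (V4SImode).  */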
5084
5085/* Create some permanent unique rtl objects shared between all functions.
5086 LINE_NUMBERS is nonzero if line numbers are to be generated. */
5087
5088void
5089init_emit_once (int line_numbers)
5090{

--- 20 unchanged lines hidden (view full) ---

5111 no_line_numbers = ! line_numbers;
5112
5113 /* Compute the word and byte modes. */
5114
5115 byte_mode = VOIDmode;
5116 word_mode = VOIDmode;
5117 double_mode = VOIDmode;
5118
5119 for (mode = GET_CLASS_NARROWEST_MODE (MODE_INT);
5120 mode != VOIDmode;
5121 mode = GET_MODE_WIDER_MODE (mode))
5122 {
5123 if (GET_MODE_BITSIZE (mode) == BITS_PER_UNIT
5124 && byte_mode == VOIDmode)
5125 byte_mode = mode;
5126
5127 if (GET_MODE_BITSIZE (mode) == BITS_PER_WORD
5128 && word_mode == VOIDmode)
5129 word_mode = mode;
5130 }
5131
5132 for (mode = GET_CLASS_NARROWEST_MODE (MODE_FLOAT);
5133 mode != VOIDmode;
5134 mode = GET_MODE_WIDER_MODE (mode))
5135 {
5136 if (GET_MODE_BITSIZE (mode) == DOUBLE_TYPE_SIZE
5137 && double_mode == VOIDmode)
5138 double_mode = mode;
5139 }
5140
5141 ptr_mode = mode_for_size (POINTER_SIZE, GET_MODE_CLASS (Pmode), 0);
5142
5143 /* Assign register numbers to the globally defined register rtx.
5144 This must be done at runtime because the register number field
5145 is in a union and some compilers can't initialize unions. */
5146
5147 pc_rtx = gen_rtx_PC (VOIDmode);
5148 cc0_rtx = gen_rtx_CC0 (VOIDmode);
5149 stack_pointer_rtx = gen_raw_REG (Pmode, STACK_POINTER_REGNUM);
5150 frame_pointer_rtx = gen_raw_REG (Pmode, FRAME_POINTER_REGNUM);
5151 if (hard_frame_pointer_rtx == 0)
5152 hard_frame_pointer_rtx = gen_raw_REG (Pmode,
5153 HARD_FRAME_POINTER_REGNUM);
5154 if (arg_pointer_rtx == 0)
5155 arg_pointer_rtx = gen_raw_REG (Pmode, ARG_POINTER_REGNUM);
5156 virtual_incoming_args_rtx =

--- 16 unchanged lines hidden (view full) ---

5173 call to push_function_context_to. This is needed by the Chill front
5174 end which calls push_function_context_to before the first call to
5175 init_function_start. */
5176 INIT_EXPANDERS;
5177#endif
5178
5179 /* Create the unique rtx's for certain rtx codes and operand values. */
5180
5181 /* Don't use gen_rtx_CONST_INT here since gen_rtx_CONST_INT in this case
5182 tries to use these variables. */
5183 for (i = - MAX_SAVED_CONST_INT; i <= MAX_SAVED_CONST_INT; i++)
5184 const_int_rtx[i + MAX_SAVED_CONST_INT] =
5185 gen_rtx_raw_CONST_INT (VOIDmode, (HOST_WIDE_INT) i);
5186
5187 if (STORE_FLAG_VALUE >= - MAX_SAVED_CONST_INT
5188 && STORE_FLAG_VALUE <= MAX_SAVED_CONST_INT)
5189 const_true_rtx = const_int_rtx[STORE_FLAG_VALUE + MAX_SAVED_CONST_INT];

--- 4 unchanged lines hidden (view full) ---

5194 REAL_VALUE_FROM_INT (dconst1, 1, 0, double_mode);
5195 REAL_VALUE_FROM_INT (dconst2, 2, 0, double_mode);
5196 REAL_VALUE_FROM_INT (dconst3, 3, 0, double_mode);
5197 REAL_VALUE_FROM_INT (dconst10, 10, 0, double_mode);
5198 REAL_VALUE_FROM_INT (dconstm1, -1, -1, double_mode);
5199 REAL_VALUE_FROM_INT (dconstm2, -2, -1, double_mode);
5200
5201 dconsthalf = dconst1;
5202 SET_REAL_EXP (&dconsthalf, REAL_EXP (&dconsthalf) - 1);
5203
5204 real_arithmetic (&dconstthird, RDIV_EXPR, &dconst1, &dconst3);
5205
5206  /* Initialize mathematical constants for constant folding builtins.
5207     These constants need to be given at least 160 bits of precision.  */
5208 real_from_string (&dconstpi,
5209 "3.1415926535897932384626433832795028841971693993751058209749445923078");
5210 real_from_string (&dconste,
5211 "2.7182818284590452353602874713526624977572470936999595749669676277241");
5212
5213 for (i = 0; i < (int) ARRAY_SIZE (const_tiny_rtx); i++)
5214 {
5215 REAL_VALUE_TYPE *r =
5216 (i == 0 ? &dconst0 : i == 1 ? &dconst1 : &dconst2);
5217
5218 for (mode = GET_CLASS_NARROWEST_MODE (MODE_FLOAT);
5219 mode != VOIDmode;
5220 mode = GET_MODE_WIDER_MODE (mode))
5221 const_tiny_rtx[i][(int) mode] =
5222 CONST_DOUBLE_FROM_REAL_VALUE (*r, mode);
5223
5224 for (mode = GET_CLASS_NARROWEST_MODE (MODE_DECIMAL_FLOAT);
5225 mode != VOIDmode;
5226 mode = GET_MODE_WIDER_MODE (mode))
5227 const_tiny_rtx[i][(int) mode] =
5228 CONST_DOUBLE_FROM_REAL_VALUE (*r, mode);
5229
5230 const_tiny_rtx[i][(int) VOIDmode] = GEN_INT (i);
5231
5232 for (mode = GET_CLASS_NARROWEST_MODE (MODE_INT);
5233 mode != VOIDmode;
5234 mode = GET_MODE_WIDER_MODE (mode))
5235 const_tiny_rtx[i][(int) mode] = GEN_INT (i);
5236
5237 for (mode = GET_CLASS_NARROWEST_MODE (MODE_PARTIAL_INT);
5238 mode != VOIDmode;
5239 mode = GET_MODE_WIDER_MODE (mode))
5240 const_tiny_rtx[i][(int) mode] = GEN_INT (i);
5241 }
5242
5243 for (mode = GET_CLASS_NARROWEST_MODE (MODE_VECTOR_INT);
5244 mode != VOIDmode;
5245 mode = GET_MODE_WIDER_MODE (mode))
5246 {
5247 const_tiny_rtx[0][(int) mode] = gen_const_vector (mode, 0);
5248 const_tiny_rtx[1][(int) mode] = gen_const_vector (mode, 1);
5249 }
5250
5251 for (mode = GET_CLASS_NARROWEST_MODE (MODE_VECTOR_FLOAT);
5252 mode != VOIDmode;
5253 mode = GET_MODE_WIDER_MODE (mode))
5254 {
5255 const_tiny_rtx[0][(int) mode] = gen_const_vector (mode, 0);
5256 const_tiny_rtx[1][(int) mode] = gen_const_vector (mode, 1);
5257 }
5258
5259 for (i = (int) CCmode; i < (int) MAX_MACHINE_MODE; ++i)
5260 if (GET_MODE_CLASS ((enum machine_mode) i) == MODE_CC)
5261 const_tiny_rtx[0][i] = const0_rtx;
5262
5263 const_tiny_rtx[0][(int) BImode] = const0_rtx;
5264 if (STORE_FLAG_VALUE == 1)
5265 const_tiny_rtx[1][(int) BImode] = const1_rtx;

--- 24 unchanged lines hidden (view full) ---

5290 static_chain_incoming_rtx = static_chain_rtx;
5291#endif
5292#endif
5293
5294 if ((unsigned) PIC_OFFSET_TABLE_REGNUM != INVALID_REGNUM)
5295 pic_offset_table_rtx = gen_raw_REG (Pmode, PIC_OFFSET_TABLE_REGNUM);
5296}
5297
5298/* Produce an exact duplicate of insn INSN after AFTER.
5299   Take care to update libcall regions if present.  */
5300
5301rtx
5302emit_copy_of_insn_after (rtx insn, rtx after)
5303{
5304 rtx new;
5305 rtx note1, note2, link;

--- 13 unchanged lines hidden (view full) ---

5319 if (CALL_INSN_FUNCTION_USAGE (insn))
5320 CALL_INSN_FUNCTION_USAGE (new)
5321 = copy_insn (CALL_INSN_FUNCTION_USAGE (insn));
5322 SIBLING_CALL_P (new) = SIBLING_CALL_P (insn);
5323 CONST_OR_PURE_CALL_P (new) = CONST_OR_PURE_CALL_P (insn);
5324 break;
5325
5326 default:
5327 gcc_unreachable ();
5328 }
5329
5330 /* Update LABEL_NUSES. */
5331 mark_jump_label (PATTERN (new), new, 0);
5332
5333 INSN_LOCATOR (new) = INSN_LOCATOR (insn);
5334
5335 /* If the old insn is frame related, then so is the new one. This is
5336 primarily needed for IA-64 unwind info which marks epilogue insns,
5337 which may be duplicated by the basic block reordering code. */
5338 RTX_FRAME_RELATED_P (new) = RTX_FRAME_RELATED_P (insn);
5339
5340  /* Copy all REG_NOTES except REG_LABEL, since mark_jump_label will
5341     make them.  */
5342 for (link = REG_NOTES (insn); link; link = XEXP (link, 1))
5343 if (REG_NOTE_KIND (link) != REG_LABEL)
5344 {
5345 if (GET_CODE (link) == EXPR_LIST)
5346 REG_NOTES (new)
5347 = copy_insn_1 (gen_rtx_EXPR_LIST (REG_NOTE_KIND (link),

--- 14 unchanged lines hidden (view full) ---

5362 p = PREV_INSN (p);
5363 XEXP (note1, 0) = p;
5364 XEXP (note2, 0) = new;
5365 }
5366 INSN_CODE (new) = INSN_CODE (insn);
5367 return new;
5368}
5369
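/* Return a CLOBBER of hard register REGNO in mode MODE, cached so that
   repeated requests for the same (mode, regno) pair share a single rtx.  */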
5370static GTY((deletable)) rtx hard_reg_clobbers [NUM_MACHINE_MODES][FIRST_PSEUDO_REGISTER];
5371rtx
5372gen_hard_reg_clobber (enum machine_mode mode, unsigned int regno)
5373{
5374 if (hard_reg_clobbers[mode][regno])
5375 return hard_reg_clobbers[mode][regno];
5376 else
5377 return (hard_reg_clobbers[mode][regno] =
5378 gen_rtx_CLOBBER (VOIDmode, gen_rtx_REG (mode, regno)));
5379}
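/* For example (illustrative): repeated requests for the same mode and
   register number return the identical shared rtx, so results can be
   compared with pointer equality:

	rtx a = gen_hard_reg_clobber (word_mode, 0);
	rtx b = gen_hard_reg_clobber (word_mode, 0);
	gcc_assert (a == b);  */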
5380
5381#include "gt-emit-rtl.h"