Deleted Added
sdiff udiff text old ( 18334 ) new ( 50397 )
full compact
1/* Optimize by combining instructions for GNU compiler.
2 Copyright (C) 1987, 88, 92, 93, 94, 1995 Free Software Foundation, Inc.
3
4This file is part of GNU CC.
5
6GNU CC is free software; you can redistribute it and/or modify
7it under the terms of the GNU General Public License as published by
8the Free Software Foundation; either version 2, or (at your option)
9any later version.
10

--- 65 unchanged lines hidden (view full) ---

76
77#include "config.h"
78#ifdef __STDC__
79#include <stdarg.h>
80#else
81#include <varargs.h>
82#endif
83
84/* Must precede rtl.h for FFS. */
85#include <stdio.h>
86
87#include "rtl.h"
88#include "flags.h"
89#include "regs.h"
90#include "hard-reg-set.h"
91#include "expr.h"
92#include "basic-block.h"
93#include "insn-config.h"
94#include "insn-flags.h"
95#include "insn-codes.h"
96#include "insn-attr.h"
97#include "recog.h"
98#include "real.h"
99
100/* It is not safe to use ordinary gen_lowpart in combine.
101 Use gen_lowpart_for_combine instead. See comments there. */
102#define gen_lowpart dont_use_gen_lowpart_you_dummy
103
104/* Number of attempts to combine instructions in this function. */
105
106static int combine_attempts;

--- 28 unchanged lines hidden (view full) ---

135 proves to be a bad idea because it makes it hard to compare
136 the dumps produced by earlier passes with those from later passes. */
137
138static int *uid_cuid;
139static int max_uid_cuid;
140
141/* Get the cuid of an insn. */
142
143#define INSN_CUID(INSN) (INSN_UID (INSN) > max_uid_cuid \
144 ? (abort(), 0) \
145 : uid_cuid[INSN_UID (INSN)])
146
147/* Maximum register number, which is the size of the tables below. */
148
149static int combine_max_regno;
150
151/* Record last point of death of (hard or pseudo) register n. */
152
153static rtx *reg_last_death;

--- 40 unchanged lines hidden (view full) ---

194static HARD_REG_SET newpat_used_regs;
195
196/* This is an insn to which a LOG_LINKS entry has been added. If this
197 insn is earlier than I2 or I3, combine should rescan starting at
198 that location. */
199
200static rtx added_links_insn;
201
202/* This is the value of undobuf.num_undo when we started processing this
203 substitution. This will prevent gen_rtx_combine from re-using a piece
204 from the previous expression. Doing so can produce circular rtl
205 structures. */
206
207static int previous_num_undos;
208
209/* Basic block number of the block in which we are performing combines. */
210static int this_basic_block;
211
212/* The next group of arrays allows the recording of the last value assigned
213 to (hard or pseudo) register n. We use this information to see if an
214 operation being processed is redundant given a prior operation performed
215 on the register. For example, an `and' with a constant is redundant if
216 all the zero bits are already known to be turned off.

--- 31 unchanged lines hidden (view full) ---

248
249 If an expression is found in the table containing a register which may
250 not validly appear in an expression, the register is replaced by
251 something that won't match, (clobber (const_int 0)).
252
253 reg_last_set_invalid[i] is set non-zero when register I is being assigned
254 to and reg_last_set_table_tick[i] == label_tick. */
255
256/* Record last value assigned to (hard or pseudo) register n. */
257
258static rtx *reg_last_set_value;
259
260/* Record the value of label_tick when the value for register n is placed in
261 reg_last_set_value[n]. */
262
263static int *reg_last_set_label;
264
265/* Record the value of label_tick when an expression involving register n
266 is placed in reg_last_set_value. */
267
268static int *reg_last_set_table_tick;
269
270/* Set non-zero if references to register n in expressions should not be
271 used. */
272
273static char *reg_last_set_invalid;
274
275/* Incremented for each label. */
276
277static int label_tick;
278
279/* Some registers that are set more than once and used in more than one
280 basic block are nevertheless always set in similar ways. For example,
281 a QImode register may be loaded from memory in two places on a machine
282 where byte loads zero extend.
283

--- 31 unchanged lines hidden (view full) ---

315static char *reg_last_set_sign_bit_copies;
316
317/* Record one modification to rtl structure
318 to be undone by storing old_contents into *where.
319 is_int is 1 if the contents are an int. */
320
321struct undo
322{
323 int is_int;
324 union {rtx r; int i;} old_contents;
325 union {rtx *r; int *i;} where;
326};
327
328/* Record a bunch of changes to be undone, up to MAX_UNDO of them.
329 num_undo says how many are currently recorded.
330
331 storage is nonzero if we must undo the allocation of new storage.
332 The value of storage is what to pass to obfree.
333
334 other_insn is nonzero if we have modified some other insn in the process
335 of working on subst_insn. It must be verified too. */
336
337#define MAX_UNDO 50
338
339struct undobuf
340{
341 int num_undo;
342 char *storage;
343 struct undo undo[MAX_UNDO];
344 rtx other_insn;
345};
346
347static struct undobuf undobuf;
348
/* Substitute NEWVAL, an rtx expression, into INTO, a place in some
   insn.  The substitution can be undone by undo_all.  If INTO is already
   set to NEWVAL, do not record this change.  Because computing NEWVAL might
   also call SUBST, we have to compute it before we put anything into
   the undo table.  */

#define SUBST(INTO, NEWVAL)						\
 do { rtx _new = (NEWVAL);						\
      if (undobuf.num_undo < MAX_UNDO)					\
	{								\
	  undobuf.undo[undobuf.num_undo].is_int = 0;			\
	  undobuf.undo[undobuf.num_undo].where.r = &INTO;		\
	  undobuf.undo[undobuf.num_undo].old_contents.r = INTO;		\
	  INTO = _new;							\
	  /* Keep the entry only if the value actually changed.  */	\
	  if (undobuf.undo[undobuf.num_undo].old_contents.r != INTO)	\
	    undobuf.num_undo++;						\
	}								\
    } while (0)
367
/* Similar to SUBST, but NEWVAL is an int.  INTO will normally be an XINT
   expression.
   Note that substitution for the value of a CONST_INT is not safe.  */

#define SUBST_INT(INTO, NEWVAL)						\
 do { if (undobuf.num_undo < MAX_UNDO)					\
	{								\
	  undobuf.undo[undobuf.num_undo].is_int = 1;			\
	  /* Cast needed because INTO may not have type int exactly.  */ \
	  undobuf.undo[undobuf.num_undo].where.i = (int *) &INTO;	\
	  undobuf.undo[undobuf.num_undo].old_contents.i = INTO;		\
	  INTO = NEWVAL;						\
	  /* Keep the entry only if the value actually changed.  */	\
	  if (undobuf.undo[undobuf.num_undo].old_contents.i != INTO)	\
	    undobuf.num_undo++;						\
	}								\
    } while (0)
383
384/* Number of times the pseudo being substituted for
385 was found and replaced. */
386
387static int n_occurrences;
388
389static void init_reg_last_arrays PROTO(());
390static void setup_incoming_promotions PROTO(());
391static void set_nonzero_bits_and_sign_copies PROTO((rtx, rtx));
392static int can_combine_p PROTO((rtx, rtx, rtx, rtx, rtx *, rtx *));
393static int combinable_i3pat PROTO((rtx, rtx *, rtx, rtx, int, rtx *));
394static rtx try_combine PROTO((rtx, rtx, rtx));
395static void undo_all PROTO((void));
396static rtx *find_split_point PROTO((rtx *, rtx));
397static rtx subst PROTO((rtx, rtx, rtx, int, int));
398static rtx simplify_rtx PROTO((rtx, enum machine_mode, int, int));
399static rtx simplify_if_then_else PROTO((rtx));
400static rtx simplify_set PROTO((rtx));

--- 4 unchanged lines hidden (view full) ---

405 int, int, int));
406static rtx extract_left_shift PROTO((rtx, int));
407static rtx make_compound_operation PROTO((rtx, enum rtx_code));
408static int get_pos_from_mask PROTO((unsigned HOST_WIDE_INT, int *));
409static rtx force_to_mode PROTO((rtx, enum machine_mode,
410 unsigned HOST_WIDE_INT, rtx, int));
411static rtx if_then_else_cond PROTO((rtx, rtx *, rtx *));
412static rtx known_cond PROTO((rtx, enum rtx_code, rtx, rtx));
413static rtx make_field_assignment PROTO((rtx));
414static rtx apply_distributive_law PROTO((rtx));
415static rtx simplify_and_const_int PROTO((rtx, enum machine_mode, rtx,
416 unsigned HOST_WIDE_INT));
417static unsigned HOST_WIDE_INT nonzero_bits PROTO((rtx, enum machine_mode));
418static int num_sign_bit_copies PROTO((rtx, enum machine_mode));
419static int merge_outer_ops PROTO((enum rtx_code *, HOST_WIDE_INT *,
420 enum rtx_code, HOST_WIDE_INT,

--- 9 unchanged lines hidden (view full) ---

430static rtx gen_unary PROTO((enum rtx_code, enum machine_mode,
431 enum machine_mode, rtx));
432static enum rtx_code simplify_comparison PROTO((enum rtx_code, rtx *, rtx *));
433static int reversible_comparison_p PROTO((rtx));
434static void update_table_tick PROTO((rtx));
435static void record_value_for_reg PROTO((rtx, rtx, rtx));
436static void record_dead_and_set_regs_1 PROTO((rtx, rtx));
437static void record_dead_and_set_regs PROTO((rtx));
438static int get_last_value_validate PROTO((rtx *, int, int));
439static rtx get_last_value PROTO((rtx));
440static int use_crosses_set_p PROTO((rtx, int));
441static void reg_dead_at_p_1 PROTO((rtx, rtx));
442static int reg_dead_at_p PROTO((rtx, rtx));
443static void move_deaths PROTO((rtx, int, rtx, rtx *));
444static int reg_bitfield_target_p PROTO((rtx, rtx));
445static void distribute_notes PROTO((rtx, rtx, rtx, rtx, rtx, rtx));
446static void distribute_links PROTO((rtx));
447static void mark_used_regs_combine PROTO((rtx));
448
449/* Main entry point for combiner. F is the first insn of the function.
450 NREGS is the first unused pseudo-reg number. */
451
452void
453combine_instructions (f, nregs)
454 rtx f;
455 int nregs;
456{
457 register rtx insn, next, prev;
458 register int i;
459 register rtx links, nextlinks;
460
461 combine_attempts = 0;
462 combine_merges = 0;
463 combine_extras = 0;
464 combine_successes = 0;
465 undobuf.num_undo = previous_num_undos = 0;
466
467 combine_max_regno = nregs;
468
469 reg_nonzero_bits
470 = (unsigned HOST_WIDE_INT *) alloca (nregs * sizeof (HOST_WIDE_INT));
471 reg_sign_bit_copies = (char *) alloca (nregs * sizeof (char));
472
473 bzero ((char *) reg_nonzero_bits, nregs * sizeof (HOST_WIDE_INT));

--- 56 unchanged lines hidden (view full) ---

530 uid_cuid[INSN_UID (insn)] = ++i;
531 subst_low_cuid = i;
532 subst_insn = insn;
533
534 if (GET_RTX_CLASS (GET_CODE (insn)) == 'i')
535 {
536 note_stores (PATTERN (insn), set_nonzero_bits_and_sign_copies);
537 record_dead_and_set_regs (insn);
538 }
539
540 if (GET_CODE (insn) == CODE_LABEL)
541 label_tick++;
542 }
543
544 nonzero_sign_valid = 1;
545

--- 143 unchanged lines hidden (view full) ---

689 rtx reg;
690 enum machine_mode mode;
691 int unsignedp;
692 rtx first = get_insns ();
693
694 for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
695 if (FUNCTION_ARG_REGNO_P (regno)
696 && (reg = promoted_input_arg (regno, &mode, &unsignedp)) != 0)
697 record_value_for_reg (reg, first,
698 gen_rtx (unsignedp ? ZERO_EXTEND : SIGN_EXTEND,
699 GET_MODE (reg),
700 gen_rtx (CLOBBER, mode, const0_rtx)));
701#endif
702}
703
704/* Called via note_stores. If X is a pseudo that is used in more than
705 one basic block, is narrower than HOST_BITS_PER_WIDE_INT, and is being
706 set, record what bits are known zero. If we are clobbering X,
707 ignore this "set" because the clobbered value won't be used.
708
709 If we are setting only a portion of X and we can't figure out what
710 portion, assume all bits will be used since we don't know what will
711 be happening.
712
713 Similarly, set how many bits of X are known to be copies of the sign bit
714 at all locations in the function. This is the smallest number implied
715 by any set of X. */
716
717static void
718set_nonzero_bits_and_sign_copies (x, set)
719 rtx x;
720 rtx set;
721{
722 int num;
723
724 if (GET_CODE (x) == REG
725 && REGNO (x) >= FIRST_PSEUDO_REGISTER
726 && reg_n_sets[REGNO (x)] > 1
727 && reg_basic_block[REGNO (x)] < 0
728 /* If this register is undefined at the start of the file, we can't
729 say what its contents were. */
730 && ! (basic_block_live_at_start[0][REGNO (x) / REGSET_ELT_BITS]
731 & ((REGSET_ELT_TYPE) 1 << (REGNO (x) % REGSET_ELT_BITS)))
732 && GET_MODE_BITSIZE (GET_MODE (x)) <= HOST_BITS_PER_WIDE_INT)
733 {
734 if (GET_CODE (set) == CLOBBER)
735 {
736 reg_nonzero_bits[REGNO (x)] = GET_MODE_MASK (GET_MODE (x));
737 reg_sign_bit_copies[REGNO (x)] = 0;
738 return;
739 }
740
741 /* If this is a complex assignment, see if we can convert it into a
742 simple assignment. */
743 set = expand_field_assignment (set);
744
745 /* If this is a simple assignment, or we have a paradoxical SUBREG,

--- 33 unchanged lines hidden (view full) ---

779 num = num_sign_bit_copies (SET_SRC (set), GET_MODE (x));
780 if (reg_sign_bit_copies[REGNO (x)] == 0
781 || reg_sign_bit_copies[REGNO (x)] > num)
782 reg_sign_bit_copies[REGNO (x)] = num;
783 }
784 else
785 {
786 reg_nonzero_bits[REGNO (x)] = GET_MODE_MASK (GET_MODE (x));
787 reg_sign_bit_copies[REGNO (x)] = 0;
788 }
789 }
790}
791
792/* See if INSN can be combined into I3. PRED and SUCC are optionally
793 insns that were previously combined into I3 or that will be combined
794 into the merger of INSN and I3.
795

--- 7 unchanged lines hidden (view full) ---

803can_combine_p (insn, i3, pred, succ, pdest, psrc)
804 rtx insn;
805 rtx i3;
806 rtx pred, succ;
807 rtx *pdest, *psrc;
808{
809 int i;
810 rtx set = 0, src, dest;
811 rtx p, link;
812 int all_adjacent = (succ ? (next_active_insn (insn) == succ
813 && next_active_insn (succ) == i3)
814 : next_active_insn (insn) == i3);
815
816 /* Can combine only if previous insn is a SET of a REG, a SUBREG or CC0.
817 or a PARALLEL consisting of such a SET and CLOBBERs.
818
819 If INSN has CLOBBER parallel parts, ignore them for our processing.

--- 14 unchanged lines hidden (view full) ---

834 && GET_CODE (XVECEXP (PATTERN (insn), 0, 0)) == SET)
835 {
836 for (i = 0; i < XVECLEN (PATTERN (insn), 0); i++)
837 {
838 rtx elt = XVECEXP (PATTERN (insn), 0, i);
839
840 switch (GET_CODE (elt))
841 {
842 /* We can ignore CLOBBERs. */
843 case CLOBBER:
844 break;
845
846 case SET:
847 /* Ignore SETs whose result isn't used but not those that
848 have side-effects. */
849 if (find_reg_note (insn, REG_UNUSED, SET_DEST (elt))

--- 84 unchanged lines hidden (view full) ---

934 {
935 /* If register alignment is being enforced for multi-word items in all
936 cases except for parameters, it is possible to have a register copy
937 insn referencing a hard register that is not allowed to contain the
938 mode being copied and which would not be valid as an operand of most
939 insns. Eliminate this problem by not combining with such an insn.
940
941 Also, on some machines we don't want to extend the life of a hard
942 register. */
943
944 if (GET_CODE (src) == REG
945 && ((REGNO (dest) < FIRST_PSEUDO_REGISTER
946 && ! HARD_REGNO_MODE_OK (REGNO (dest), GET_MODE (dest)))
947 /* Don't extend the life of a hard register unless it is
948 user variable (if we have few registers) or it can't
949 fit into the desired register (meaning something special
950 is going on). */
951 || (REGNO (src) < FIRST_PSEUDO_REGISTER
952 && (! HARD_REGNO_MODE_OK (REGNO (src), GET_MODE (src))
953#ifdef SMALL_REGISTER_CLASSES
954 || ! REG_USERVAR_P (src)
955#endif
956 ))))
957 return 0;
958 }
959 else if (GET_CODE (dest) != CC0)
960 return 0;
961
962 /* Don't substitute for a register intended as a clobberable operand.
963 Similarly, don't substitute an expression containing a register that
964 will be clobbered in I3. */
965 if (GET_CODE (PATTERN (i3)) == PARALLEL)
966 for (i = XVECLEN (PATTERN (i3), 0) - 1; i >= 0; i--)
967 if (GET_CODE (XVECEXP (PATTERN (i3), 0, i)) == CLOBBER
968 && (reg_overlap_mentioned_p (XEXP (XVECEXP (PATTERN (i3), 0, i), 0),
969 src)
970 || rtx_equal_p (XEXP (XVECEXP (PATTERN (i3), 0, i), 0), dest)))
971 return 0;
972
973 /* If INSN contains anything volatile, or is an `asm' (whether volatile
974 or not), reject, unless nothing volatile comes between it and I3,
975 with the exception of SUCC. */
976
977 if (GET_CODE (src) == ASM_OPERANDS || volatile_refs_p (src))
978 for (p = NEXT_INSN (insn); p != i3; p = NEXT_INSN (p))
979 if (GET_RTX_CLASS (GET_CODE (p)) == 'i'
980 && p != succ && volatile_refs_p (PATTERN (p)))
981 return 0;
982
983 /* If there are any volatile insns between INSN and I3, reject, because
984 they might affect machine state. */
985
986 for (p = NEXT_INSN (insn); p != i3; p = NEXT_INSN (p))
987 if (GET_RTX_CLASS (GET_CODE (p)) == 'i'
988 && p != succ && volatile_insn_p (PATTERN (p)))
989 return 0;
990

--- 32 unchanged lines hidden (view full) ---

1023 to be allowed. */
1024
1025 *pdest = dest;
1026 *psrc = src;
1027
1028 return 1;
1029}
1030
1031/* LOC is the location within I3 that contains its pattern or the component
1032 of a PARALLEL of the pattern. We validate that it is valid for combining.
1033
1034 One problem is if I3 modifies its output, as opposed to replacing it
1035 entirely, we can't allow the output to contain I2DEST or I1DEST as doing
1036 so would produce an insn that is not equivalent to the original insns.
1037
1038 Consider:

--- 12 unchanged lines hidden (view full) ---

1051 We can also run into a problem if I2 sets a register that I1
1052 uses and I1 gets directly substituted into I3 (not via I2). In that
1053 case, we would be getting the wrong value of I2DEST into I3, so we
1054 must reject the combination. This case occurs when I2 and I1 both
1055 feed into I3, rather than when I1 feeds into I2, which feeds into I3.
1056 If I1_NOT_IN_SRC is non-zero, it means that finding I1 in the source
1057 of a SET must prevent combination from occurring.
1058
1059 On machines where SMALL_REGISTER_CLASSES is defined, we don't combine
1060 if the destination of a SET is a hard register that isn't a user
1061 variable.
1062
1063 Before doing the above check, we first try to expand a field assignment
1064 into a set of logical operations.
1065
1066 If PI3_DEST_KILLED is non-zero, it is a pointer to a location in which
1067 we place a register that is both set and used within I3. If more than one

--- 12 unchanged lines hidden (view full) ---

1080{
1081 rtx x = *loc;
1082
1083 if (GET_CODE (x) == SET)
1084 {
1085 rtx set = expand_field_assignment (x);
1086 rtx dest = SET_DEST (set);
1087 rtx src = SET_SRC (set);
1088 rtx inner_dest = dest, inner_src = src;
1089
1090 SUBST (*loc, set);
1091
1092 while (GET_CODE (inner_dest) == STRICT_LOW_PART
1093 || GET_CODE (inner_dest) == SUBREG
1094 || GET_CODE (inner_dest) == ZERO_EXTRACT)
1095 inner_dest = XEXP (inner_dest, 0);
1096

--- 21 unchanged lines hidden (view full) ---

1118 return 0;
1119#endif
1120
1121 /* Check for the case where I3 modifies its output, as
1122 discussed above. */
1123 if ((inner_dest != dest
1124 && (reg_overlap_mentioned_p (i2dest, inner_dest)
1125 || (i1dest && reg_overlap_mentioned_p (i1dest, inner_dest))))
1126 /* This is the same test done in can_combine_p except that we
1127 allow a hard register with SMALL_REGISTER_CLASSES if SRC is a
1128 CALL operation. */
1129 || (GET_CODE (inner_dest) == REG
1130 && REGNO (inner_dest) < FIRST_PSEUDO_REGISTER
1131 && (! HARD_REGNO_MODE_OK (REGNO (inner_dest),
1132 GET_MODE (inner_dest))
1133#ifdef SMALL_REGISTER_CLASSES
1134 || (GET_CODE (src) != CALL && ! REG_USERVAR_P (inner_dest))
1135#endif
1136 ))
1137 || (i1_not_in_src && reg_overlap_mentioned_p (i1dest, src)))
1138 return 0;
1139
1140 /* If DEST is used in I3, it is being killed in this insn,
1141 so record that for later.
1142 Never add REG_DEAD notes for the FRAME_POINTER_REGNUM or the
1143 STACK_POINTER_REGNUM, since these are always considered to be
1144 live. Similarly for ARG_POINTER_REGNUM if it is fixed. */

--- 90 unchanged lines hidden (view full) ---

1235 if (GET_RTX_CLASS (GET_CODE (i3)) != 'i'
1236 || GET_RTX_CLASS (GET_CODE (i2)) != 'i'
1237 || (i1 && GET_RTX_CLASS (GET_CODE (i1)) != 'i')
1238 || find_reg_note (i3, REG_LIBCALL, NULL_RTX))
1239 return 0;
1240
1241 combine_attempts++;
1242
1243 undobuf.num_undo = previous_num_undos = 0;
1244 undobuf.other_insn = 0;
1245
1246 /* Save the current high-water-mark so we can free storage if we didn't
1247 accept this combination. */
1248 undobuf.storage = (char *) oballoc (0);
1249
1250 /* Reset the hard register usage information. */
1251 CLEAR_HARD_REG_SET (newpat_used_regs);

--- 16 unchanged lines hidden (view full) ---

1268 We make very conservative checks below and only try to handle the
1269 most common cases of this. For example, we only handle the case
1270 where I2 and I3 are adjacent to avoid making difficult register
1271 usage tests. */
1272
1273 if (i1 == 0 && GET_CODE (i3) == INSN && GET_CODE (PATTERN (i3)) == SET
1274 && GET_CODE (SET_SRC (PATTERN (i3))) == REG
1275 && REGNO (SET_SRC (PATTERN (i3))) >= FIRST_PSEUDO_REGISTER
1276#ifdef SMALL_REGISTER_CLASSES
1277 && (GET_CODE (SET_DEST (PATTERN (i3))) != REG
1278 || REGNO (SET_DEST (PATTERN (i3))) >= FIRST_PSEUDO_REGISTER
1279 || REG_USERVAR_P (SET_DEST (PATTERN (i3))))
1280#endif
1281 && find_reg_note (i3, REG_DEAD, SET_SRC (PATTERN (i3)))
1282 && GET_CODE (PATTERN (i2)) == PARALLEL
1283 && ! side_effects_p (SET_DEST (PATTERN (i3)))
1284 /* If the dest of I3 is a ZERO_EXTRACT or STRICT_LOW_PART, the code
1285 below would need to check what is inside (and reg_overlap_mentioned_p
1286 doesn't support those codes anyway). Don't allow those destinations;
1287 the resulting insn isn't likely to be recognized anyway. */
1288 && GET_CODE (SET_DEST (PATTERN (i3))) != ZERO_EXTRACT

--- 10 unchanged lines hidden (view full) ---

1299 (parallel [(set (mem (reg 69)) ...)
1300 (set (reg 69) ...)])
1301 which is not well-defined as to order of actions.
1302 (Besides, reload can't handle output reloads for this.)
1303
1304 The problem can also happen if the dest of I3 is a memory ref,
1305 if another dest in I2 is an indirect memory ref. */
1306 for (i = 0; i < XVECLEN (p2, 0); i++)
1307 if (GET_CODE (XVECEXP (p2, 0, i)) == SET
1308 && reg_overlap_mentioned_p (SET_DEST (PATTERN (i3)),
1309 SET_DEST (XVECEXP (p2, 0, i))))
1310 break;
1311
1312 if (i == XVECLEN (p2, 0))
1313 for (i = 0; i < XVECLEN (p2, 0); i++)
1314 if (SET_DEST (XVECEXP (p2, 0, i)) == SET_SRC (PATTERN (i3)))
1315 {

--- 50 unchanged lines hidden (view full) ---

1366 if (i == 1)
1367 {
1368 /* We make I1 with the same INSN_UID as I2. This gives it
1369 the same INSN_CUID for value tracking. Our fake I1 will
1370 never appear in the insn stream so giving it the same INSN_UID
1371 as I2 will not cause a problem. */
1372
1373 subst_prev_insn = i1
1374 = gen_rtx (INSN, VOIDmode, INSN_UID (i2), 0, i2,
1375 XVECEXP (PATTERN (i2), 0, 1), -1, 0, 0);
1376
1377 SUBST (PATTERN (i2), XVECEXP (PATTERN (i2), 0, 0));
1378 SUBST (XEXP (SET_SRC (PATTERN (i2)), 0),
1379 SET_DEST (PATTERN (i1)));
1380 }
1381 }
1382#endif
1383

--- 80 unchanged lines hidden (view full) ---

1464 /* If the set in I2 needs to be kept around, we must make a copy of
1465 PATTERN (I2), so that when we substitute I1SRC for I1DEST in
1466 PATTERN (I2), we are only substituting for the original I1DEST, not into
1467 an already-substituted copy. This also prevents making self-referential
1468 rtx. If I2 is a PARALLEL, we just need the piece that assigns I2SRC to
1469 I2DEST. */
1470
1471 i2pat = (GET_CODE (PATTERN (i2)) == PARALLEL
1472 ? gen_rtx (SET, VOIDmode, i2dest, i2src)
1473 : PATTERN (i2));
1474
1475 if (added_sets_2)
1476 i2pat = copy_rtx (i2pat);
1477
1478 combine_merges++;
1479
1480 /* Substitute in the latest insn for the regs set by the earlier ones. */

--- 27 unchanged lines hidden (view full) ---

1508 i1src = subst (i1src, pc_rtx, pc_rtx, 0, 0);
1509 }
1510 else
1511 {
1512 subst_low_cuid = INSN_CUID (i2);
1513 i2src = subst (i2src, pc_rtx, pc_rtx, 0, 0);
1514 }
1515
1516 previous_num_undos = undobuf.num_undo;
1517 }
1518
1519#ifndef HAVE_cc0
1520 /* Many machines that don't use CC0 have insns that can both perform an
1521 arithmetic operation and set the condition code. These operations will
1522 be represented as a PARALLEL with the first element of the vector
1523 being a COMPARE of an arithmetic operation with the constant zero.
1524 The second element of the vector will set some pseudo to the result
1525 of the same arithmetic operation. If we simplify the COMPARE, we won't
1526 match such a pattern and so will generate an extra insn. Here we test
1527 for this case, where both the comparison and the operation result are
1528 needed, and make the PARALLEL by just replacing I2DEST in I3SRC with
1529 I2SRC. Later we will make the PARALLEL that contains I2. */
1530
1531 if (i1 == 0 && added_sets_2 && GET_CODE (PATTERN (i3)) == SET
1532 && GET_CODE (SET_SRC (PATTERN (i3))) == COMPARE
1533 && XEXP (SET_SRC (PATTERN (i3)), 1) == const0_rtx
1534 && rtx_equal_p (XEXP (SET_SRC (PATTERN (i3)), 0), i2dest))
1535 {
1536 rtx *cc_use;
1537 enum machine_mode compare_mode;
1538
1539 newpat = PATTERN (i3);
1540 SUBST (XEXP (SET_SRC (newpat), 0), i2src);
1541
1542 i2_is_used = 1;
1543
1544#ifdef EXTRA_CC_MODES
1545 /* See if a COMPARE with the operand we substituted in should be done

--- 4 unchanged lines hidden (view full) ---

1550 if (undobuf.other_insn == 0
1551 && (cc_use = find_single_use (SET_DEST (newpat), i3,
1552 &undobuf.other_insn))
1553 && ((compare_mode = SELECT_CC_MODE (GET_CODE (*cc_use),
1554 i2src, const0_rtx))
1555 != GET_MODE (SET_DEST (newpat))))
1556 {
1557 int regno = REGNO (SET_DEST (newpat));
1558 rtx new_dest = gen_rtx (REG, compare_mode, regno);
1559
1560 if (regno < FIRST_PSEUDO_REGISTER
1561 || (reg_n_sets[regno] == 1 && ! added_sets_2
1562 && ! REG_USERVAR_P (SET_DEST (newpat))))
1563 {
1564 if (regno >= FIRST_PSEUDO_REGISTER)
1565 SUBST (regno_reg_rtx[regno], new_dest);
1566
1567 SUBST (SET_DEST (newpat), new_dest);
1568 SUBST (XEXP (*cc_use, 0), new_dest);
1569 SUBST (SET_SRC (newpat),

--- 12 unchanged lines hidden (view full) ---

1582
1583 /* If I1 feeds into I2 (not into I3) and I1DEST is in I1SRC, we
1584 need to make a unique copy of I2SRC each time we substitute it
1585 to avoid self-referential rtl. */
1586
1587 subst_low_cuid = INSN_CUID (i2);
1588 newpat = subst (PATTERN (i3), i2dest, i2src, 0,
1589 ! i1_feeds_i3 && i1dest_in_i1src);
1590 previous_num_undos = undobuf.num_undo;
1591
1592 /* Record whether i2's body now appears within i3's body. */
1593 i2_is_used = n_occurrences;
1594 }
1595
1596 /* If we already got a failure, don't try to do more. Otherwise,
1597 try to substitute in I1 if we have it. */
1598
1599 if (i1 && GET_CODE (newpat) != CLOBBER)
1600 {
1601 /* Before we can do this substitution, we must redo the test done
1602 above (see detailed comments there) that ensures that I1DEST
1603 isn't mentioned in any SETs in NEWPAT that are field assignments. */
1604
1605 if (! combinable_i3pat (NULL_RTX, &newpat, i1dest, NULL_RTX,
1606 0, NULL_PTR))
1607 {
1608 undo_all ();
1609 return 0;
1610 }
1611
1612 n_occurrences = 0;
1613 subst_low_cuid = INSN_CUID (i1);
1614 newpat = subst (newpat, i1dest, i1src, 0, 0);
1615 previous_num_undos = undobuf.num_undo;
1616 }
1617
1618 /* Fail if an autoincrement side-effect has been duplicated. Be careful
1619 to count all the ways that I2SRC and I1SRC can be used. */
1620 if ((FIND_REG_INC_NOTE (i2, NULL_RTX) != 0
1621 && i2_is_used + added_sets_2 > 1)
1622 || (i1 != 0 && FIND_REG_INC_NOTE (i1, NULL_RTX) != 0
1623 && (n_occurrences + added_sets_1 + (added_sets_2 && ! i1_feeds_i3)

--- 20 unchanged lines hidden (view full) ---

1644 if (added_sets_1 || added_sets_2)
1645 {
1646 combine_extras++;
1647
1648 if (GET_CODE (newpat) == PARALLEL)
1649 {
1650 rtvec old = XVEC (newpat, 0);
1651 total_sets = XVECLEN (newpat, 0) + added_sets_1 + added_sets_2;
1652 newpat = gen_rtx (PARALLEL, VOIDmode, rtvec_alloc (total_sets));
1653 bcopy ((char *) &old->elem[0], (char *) &XVECEXP (newpat, 0, 0),
1654 sizeof (old->elem[0]) * old->num_elem);
1655 }
1656 else
1657 {
1658 rtx old = newpat;
1659 total_sets = 1 + added_sets_1 + added_sets_2;
1660 newpat = gen_rtx (PARALLEL, VOIDmode, rtvec_alloc (total_sets));
1661 XVECEXP (newpat, 0, 0) = old;
1662 }
1663
1664 if (added_sets_1)
1665 XVECEXP (newpat, 0, --total_sets)
1666 = (GET_CODE (PATTERN (i1)) == PARALLEL
1667 ? gen_rtx (SET, VOIDmode, i1dest, i1src) : PATTERN (i1));
1668
1669 if (added_sets_2)
1670 {
1671 /* If there is no I1, use I2's body as is. We used to also not do
1672 the subst call below if I2 was substituted into I3,
1673 but that could lose a simplification. */
1674 if (i1 == 0)
1675 XVECEXP (newpat, 0, --total_sets) = i2pat;

--- 80 unchanged lines hidden (view full) ---

1756 if (m_split == 0 && ! reg_overlap_mentioned_p (ni2dest, newpat))
1757 {
1758 /* If I2DEST is a hard register or the only use of a pseudo,
1759 we can change its mode. */
1760 if (GET_MODE (SET_DEST (newpat)) != GET_MODE (i2dest)
1761 && GET_MODE (SET_DEST (newpat)) != VOIDmode
1762 && GET_CODE (i2dest) == REG
1763 && (REGNO (i2dest) < FIRST_PSEUDO_REGISTER
1764 || (reg_n_sets[REGNO (i2dest)] == 1 && ! added_sets_2
1765 && ! REG_USERVAR_P (i2dest))))
1766 ni2dest = gen_rtx (REG, GET_MODE (SET_DEST (newpat)),
1767 REGNO (i2dest));
1768
1769 m_split = split_insns (gen_rtx (PARALLEL, VOIDmode,
1770 gen_rtvec (2, newpat,
1771 gen_rtx (CLOBBER,
1772 VOIDmode,
1773 ni2dest))),
1774 i3);
1775 }
1776
1777 if (m_split && GET_CODE (m_split) == SEQUENCE
1778 && XVECLEN (m_split, 0) == 2
1779 && (next_real_insn (i2) == i3
1780 || ! use_crosses_set_p (PATTERN (XVECEXP (m_split, 0, 0)),
1781 INSN_CUID (i2))))
1782 {

--- 10 unchanged lines hidden (view full) ---

1793
1794 if (REGNO (i2dest) >= FIRST_PSEUDO_REGISTER)
1795 SUBST (regno_reg_rtx[REGNO (i2dest)], ni2dest);
1796
1797 i2_code_number = recog_for_combine (&newi2pat, i2, &new_i2_notes,
1798 &i2_scratches);
1799
1800 /* If I2 or I3 has multiple SETs, we won't know how to track
1801 register status, so don't use these insns. */
1802
1803 if (i2_code_number >= 0 && i2set && i3set)
1804 insn_code_number = recog_for_combine (&newi3pat, i3, &new_i3_notes,
1805 &i3_scratches);
1806 if (insn_code_number >= 0)
1807 newpat = newi3pat;
1808
1809 /* It is possible that both insns now set the destination of I3.
1810 If so, we must show an extra use of it. */
1811
1812 if (insn_code_number >= 0 && GET_CODE (SET_DEST (i3set)) == REG
1813 && GET_CODE (SET_DEST (i2set)) == REG
1814 && REGNO (SET_DEST (i3set)) == REGNO (SET_DEST (i2set)))
1815 reg_n_sets[REGNO (SET_DEST (i2set))]++;
1816 }
1817
1818 /* If we can split it and use I2DEST, go ahead and see if that
1819 helps things be recognized. Verify that none of the registers
1820 are set between I2 and I3. */
1821 if (insn_code_number < 0 && (split = find_split_point (&newpat, i3)) != 0
1822#ifdef HAVE_cc0
1823 && GET_CODE (i2dest) == REG
1824#endif
1825 /* We need I2DEST in the proper mode. If it is a hard register
1826 or the only use of a pseudo, we can change its mode. */
1827 && (GET_MODE (*split) == GET_MODE (i2dest)
1828 || GET_MODE (*split) == VOIDmode
1829 || REGNO (i2dest) < FIRST_PSEUDO_REGISTER
1830 || (reg_n_sets[REGNO (i2dest)] == 1 && ! added_sets_2
1831 && ! REG_USERVAR_P (i2dest)))
1832 && (next_real_insn (i2) == i3
1833 || ! use_crosses_set_p (*split, INSN_CUID (i2)))
1834 /* We can't overwrite I2DEST if its value is still used by
1835 NEWPAT. */
1836 && ! reg_referenced_p (i2dest, newpat))
1837 {
1838 rtx newdest = i2dest;
1839 enum rtx_code split_code = GET_CODE (*split);
1840 enum machine_mode split_mode = GET_MODE (*split);
1841
1842 /* Get NEWDEST as a register in the proper mode. We have already
1843 validated that we can do this. */
1844 if (GET_MODE (i2dest) != split_mode && split_mode != VOIDmode)
1845 {
1846 newdest = gen_rtx (REG, split_mode, REGNO (i2dest));
1847
1848 if (REGNO (i2dest) >= FIRST_PSEUDO_REGISTER)
1849 SUBST (regno_reg_rtx[REGNO (i2dest)], newdest);
1850 }
1851
1852 /* If *SPLIT is a (mult FOO (const_int pow2)), convert it to
1853 an ASHIFT. This can occur if it was inside a PLUS and hence
1854 appeared to be a memory address. This is a kludge. */

--- 96 unchanged lines hidden (view full) ---

1951 to I3. No other part of combine.c makes such a transformation.
1952
1953 The new I3 will have a destination that was previously the
1954 destination of I1 or I2 and which was used in i2 or I3. Call
1955 distribute_links to make a LOG_LINK from the next use of
1956 that destination. */
1957
1958 PATTERN (i3) = newpat;
1959 distribute_links (gen_rtx (INSN_LIST, VOIDmode, i3, NULL_RTX));
1960
1961 /* I3 now uses what used to be its destination and which is
1962 now I2's destination. That means we need a LOG_LINK from
1963 I3 to I2. But we used to have one, so we still will.
1964
1965 However, some later insn might be using I2's dest and have
1966 a LOG_LINK pointing at I3. We must remove this link.
1967 The simplest way to remove the link is to point it at I1,

--- 37 unchanged lines hidden (view full) ---

2005 /* Don't pass sets with (USE (MEM ...)) dests to the following. */
2006 && GET_CODE (SET_DEST (XVECEXP (newpat, 0, 1))) != USE
2007 && GET_CODE (SET_DEST (XVECEXP (newpat, 0, 0))) != USE
2008 && ! reg_referenced_p (SET_DEST (XVECEXP (newpat, 0, 1)),
2009 XVECEXP (newpat, 0, 0))
2010 && ! reg_referenced_p (SET_DEST (XVECEXP (newpat, 0, 0)),
2011 XVECEXP (newpat, 0, 1)))
2012 {
2013 newi2pat = XVECEXP (newpat, 0, 1);
2014 newpat = XVECEXP (newpat, 0, 0);
2015
2016 i2_code_number
2017 = recog_for_combine (&newi2pat, i2, &new_i2_notes, &i2_scratches);
2018
2019 if (i2_code_number >= 0)
2020 insn_code_number
2021 = recog_for_combine (&newpat, i3, &new_i3_notes, &i3_scratches);
2022 }

--- 35 unchanged lines hidden (view full) ---

2058 for (note = REG_NOTES (undobuf.other_insn); note; note = next)
2059 {
2060 next = XEXP (note, 1);
2061
2062 if (REG_NOTE_KIND (note) == REG_UNUSED
2063 && ! reg_set_p (XEXP (note, 0), PATTERN (undobuf.other_insn)))
2064 {
2065 if (GET_CODE (XEXP (note, 0)) == REG)
2066 reg_n_deaths[REGNO (XEXP (note, 0))]--;
2067
2068 remove_note (undobuf.other_insn, note);
2069 }
2070 }
2071
2072 for (note = new_other_notes; note; note = XEXP (note, 1))
2073 if (GET_CODE (XEXP (note, 0)) == REG)
2074 reg_n_deaths[REGNO (XEXP (note, 0))]++;
2075
2076 distribute_notes (new_other_notes, undobuf.other_insn,
2077 undobuf.other_insn, NULL_RTX, NULL_RTX, NULL_RTX);
2078 }
2079
2080 /* We now know that we can do this combination. Merge the insns and
2081 update the status of registers and LOG_LINKS. */
2082
2083 {
2084 rtx i3notes, i2notes, i1notes = 0;
2085 rtx i3links, i2links, i1links = 0;
2086 rtx midnotes = 0;
2087 register int regno;
2088 /* Compute which registers we expect to eliminate. */
2089 rtx elim_i2 = (newi2pat || i2dest_in_i2src || i2dest_in_i1src
2090 ? 0 : i2dest);
2091 rtx elim_i1 = i1 == 0 || i1dest_in_i1src ? 0 : i1dest;
2092
2093 /* Get the old REG_NOTES and LOG_LINKS from all our insns and
2094 clear them. */
2095 i3notes = REG_NOTES (i3), i3links = LOG_LINKS (i3);
2096 i2notes = REG_NOTES (i2), i2links = LOG_LINKS (i2);
2097 if (i1)
2098 i1notes = REG_NOTES (i1), i1links = LOG_LINKS (i1);
2099

--- 81 unchanged lines hidden (view full) ---

2181 LOG_LINKS (i1) = 0;
2182 REG_NOTES (i1) = 0;
2183 PUT_CODE (i1, NOTE);
2184 NOTE_LINE_NUMBER (i1) = NOTE_INSN_DELETED;
2185 NOTE_SOURCE_FILE (i1) = 0;
2186 }
2187
2188 /* Get death notes for everything that is now used in either I3 or
2189 I2 and used to die in a previous insn. */
2190
2191 move_deaths (newpat, i1 ? INSN_CUID (i1) : INSN_CUID (i2), i3, &midnotes);
2192 if (newi2pat)
2193 move_deaths (newi2pat, INSN_CUID (i1), i2, &midnotes);
2194
2195 /* Distribute all the LOG_LINKS and REG_NOTES from I1, I2, and I3. */
2196 if (i3notes)
2197 distribute_notes (i3notes, i3, i3, newi2pat ? i2 : NULL_RTX,
2198 elim_i2, elim_i1);
2199 if (i2notes)
2200 distribute_notes (i2notes, i2, i3, newi2pat ? i2 : NULL_RTX,
2201 elim_i2, elim_i1);

--- 8 unchanged lines hidden (view full) ---

2210 know these are REG_UNUSED and want them to go to the desired insn,
2211 so we always pass it as i3. We have not counted the notes in
2212 reg_n_deaths yet, so we need to do so now. */
2213
2214 if (newi2pat && new_i2_notes)
2215 {
2216 for (temp = new_i2_notes; temp; temp = XEXP (temp, 1))
2217 if (GET_CODE (XEXP (temp, 0)) == REG)
2218 reg_n_deaths[REGNO (XEXP (temp, 0))]++;
2219
2220 distribute_notes (new_i2_notes, i2, i2, NULL_RTX, NULL_RTX, NULL_RTX);
2221 }
2222
2223 if (new_i3_notes)
2224 {
2225 for (temp = new_i3_notes; temp; temp = XEXP (temp, 1))
2226 if (GET_CODE (XEXP (temp, 0)) == REG)
2227 reg_n_deaths[REGNO (XEXP (temp, 0))]++;
2228
2229 distribute_notes (new_i3_notes, i3, i3, NULL_RTX, NULL_RTX, NULL_RTX);
2230 }
2231
2232 /* If I3DEST was used in I3SRC, it really died in I3. We may need to
2233 put a REG_DEAD note for it somewhere. Similarly for I2 and I1.
2234 Show an additional death due to the REG_DEAD note we make here. If
2235 we discard it in distribute_notes, we will decrement it again. */
2236
2237 if (i3dest_killed)
2238 {
2239 if (GET_CODE (i3dest_killed) == REG)
2240 reg_n_deaths[REGNO (i3dest_killed)]++;
2241
2242 distribute_notes (gen_rtx (EXPR_LIST, REG_DEAD, i3dest_killed,
2243 NULL_RTX),
2244 NULL_RTX, i3, newi2pat ? i2 : NULL_RTX,
2245 NULL_RTX, NULL_RTX);
2246 }
2247
2248 /* For I2 and I1, we have to be careful. If NEWI2PAT exists and sets
2249 I2DEST or I1DEST, the death must be somewhere before I2, not I3. If
2250 we passed I3 in that case, it might delete I2. */
2251
2252 if (i2dest_in_i2src)
2253 {
2254 if (GET_CODE (i2dest) == REG)
2255 reg_n_deaths[REGNO (i2dest)]++;
2256
2257 if (newi2pat && reg_set_p (i2dest, newi2pat))
2258 distribute_notes (gen_rtx (EXPR_LIST, REG_DEAD, i2dest, NULL_RTX),
2259 NULL_RTX, i2, NULL_RTX, NULL_RTX, NULL_RTX);
2260 else
2261 distribute_notes (gen_rtx (EXPR_LIST, REG_DEAD, i2dest, NULL_RTX),
2262 NULL_RTX, i3, newi2pat ? i2 : NULL_RTX,
2263 NULL_RTX, NULL_RTX);
2264 }
2265
2266 if (i1dest_in_i1src)
2267 {
2268 if (GET_CODE (i1dest) == REG)
2269 reg_n_deaths[REGNO (i1dest)]++;
2270
2271 if (newi2pat && reg_set_p (i1dest, newi2pat))
2272 distribute_notes (gen_rtx (EXPR_LIST, REG_DEAD, i1dest, NULL_RTX),
2273 NULL_RTX, i2, NULL_RTX, NULL_RTX, NULL_RTX);
2274 else
2275 distribute_notes (gen_rtx (EXPR_LIST, REG_DEAD, i1dest, NULL_RTX),
2276 NULL_RTX, i3, newi2pat ? i2 : NULL_RTX,
2277 NULL_RTX, NULL_RTX);
2278 }
2279
2280 distribute_links (i3links);
2281 distribute_links (i2links);
2282 distribute_links (i1links);
2283

--- 14 unchanged lines hidden (view full) ---

2298 if ((set = single_set (XEXP (link, 0))) != 0
2299 && rtx_equal_p (i2dest, SET_DEST (set)))
2300 i2_insn = XEXP (link, 0), i2_val = SET_SRC (set);
2301
2302 record_value_for_reg (i2dest, i2_insn, i2_val);
2303
2304 /* If the reg formerly set in I2 died only once and that was in I3,
2305 zero its use count so it won't make `reload' do any work. */
2306 if (! added_sets_2 && newi2pat == 0 && ! i2dest_in_i2src)
2307 {
2308 regno = REGNO (i2dest);
2309 reg_n_sets[regno]--;
2310 if (reg_n_sets[regno] == 0
2311 && ! (basic_block_live_at_start[0][regno / REGSET_ELT_BITS]
2312 & ((REGSET_ELT_TYPE) 1 << (regno % REGSET_ELT_BITS))))
2313 reg_n_refs[regno] = 0;
2314 }
2315 }
2316
2317 if (i1 && GET_CODE (i1dest) == REG)
2318 {
2319 rtx link;
2320 rtx i1_insn = 0, i1_val = 0, set;
2321
2322 for (link = LOG_LINKS (i3); link; link = XEXP (link, 1))
2323 if ((set = single_set (XEXP (link, 0))) != 0
2324 && rtx_equal_p (i1dest, SET_DEST (set)))
2325 i1_insn = XEXP (link, 0), i1_val = SET_SRC (set);
2326
2327 record_value_for_reg (i1dest, i1_insn, i1_val);
2328
2329 regno = REGNO (i1dest);
2330 if (! added_sets_1 && ! i1dest_in_i1src)
2331 {
2332 reg_n_sets[regno]--;
2333 if (reg_n_sets[regno] == 0
2334 && ! (basic_block_live_at_start[0][regno / REGSET_ELT_BITS]
2335 & ((REGSET_ELT_TYPE) 1 << (regno % REGSET_ELT_BITS))))
2336 reg_n_refs[regno] = 0;
2337 }
2338 }
2339
2340 /* Update reg_nonzero_bits et al for any changes that may have been made
2341 to this insn. */
2342
2343 note_stores (newpat, set_nonzero_bits_and_sign_copies);
2344 if (newi2pat)

--- 29 unchanged lines hidden (view full) ---

2374 return newi2pat ? i2 : i3;
2375}
2376
2377/* Undo all the modifications recorded in undobuf. */
2378
2379static void
2380undo_all ()
2381{
2382 register int i;
2383 if (undobuf.num_undo > MAX_UNDO)
2384 undobuf.num_undo = MAX_UNDO;
2385 for (i = undobuf.num_undo - 1; i >= 0; i--)
2386 {
2387 if (undobuf.undo[i].is_int)
2388 *undobuf.undo[i].where.i = undobuf.undo[i].old_contents.i;
2389 else
2390 *undobuf.undo[i].where.r = undobuf.undo[i].old_contents.r;
2391
2392 }
2393
2394 obfree (undobuf.storage);
2395 undobuf.num_undo = 0;
2396
2397 /* Clear this here, so that subsequent get_last_value calls are not
2398 affected. */
2399 subst_prev_insn = NULL_RTX;
2400}
2401
2402/* Find the innermost point within the rtx at LOC, possibly LOC itself,
2403 where we have an arithmetic expression and return that point. LOC will

--- 45 unchanged lines hidden (view full) ---

2449 the machine-specific way to split large constants. We use
2450 the first pseudo-reg (one of the virtual regs) as a placeholder;
2451 it will not remain in the result. */
2452 if (GET_CODE (XEXP (x, 0)) == PLUS
2453 && GET_CODE (XEXP (XEXP (x, 0), 1)) == CONST_INT
2454 && ! memory_address_p (GET_MODE (x), XEXP (x, 0)))
2455 {
2456 rtx reg = regno_reg_rtx[FIRST_PSEUDO_REGISTER];
2457 rtx seq = split_insns (gen_rtx (SET, VOIDmode, reg, XEXP (x, 0)),
2458 subst_insn);
2459
2460 /* This should have produced two insns, each of which sets our
2461 placeholder. If the source of the second is a valid address,
2462 we can make put both sources together and make a split point
2463 in the middle. */
2464
2465 if (seq && XVECLEN (seq, 0) == 2

--- 60 unchanged lines hidden (view full) ---

2526 return &SET_SRC (x);
2527#endif
2528
2529 /* See if we can split SET_SRC as it stands. */
2530 split = find_split_point (&SET_SRC (x), insn);
2531 if (split && split != &SET_SRC (x))
2532 return split;
2533
2534 /* See if this is a bitfield assignment with everything constant. If
2535 so, this is an IOR of an AND, so split it into that. */
2536 if (GET_CODE (SET_DEST (x)) == ZERO_EXTRACT
2537 && (GET_MODE_BITSIZE (GET_MODE (XEXP (SET_DEST (x), 0)))
2538 <= HOST_BITS_PER_WIDE_INT)
2539 && GET_CODE (XEXP (SET_DEST (x), 1)) == CONST_INT
2540 && GET_CODE (XEXP (SET_DEST (x), 2)) == CONST_INT
2541 && GET_CODE (SET_SRC (x)) == CONST_INT

--- 50 unchanged lines hidden (view full) ---

2592 && GET_CODE (XEXP (SET_SRC (x), 0)) == REG
2593 && (pos = exact_log2 (INTVAL (XEXP (SET_SRC (x), 1)))) >= 7
2594 && GET_CODE (SET_DEST (x)) == REG
2595 && (split = find_single_use (SET_DEST (x), insn, NULL_PTR)) != 0
2596 && (GET_CODE (*split) == EQ || GET_CODE (*split) == NE)
2597 && XEXP (*split, 0) == SET_DEST (x)
2598 && XEXP (*split, 1) == const0_rtx)
2599 {
2600 SUBST (SET_SRC (x),
2601 make_extraction (GET_MODE (SET_DEST (x)),
2602 XEXP (SET_SRC (x), 0),
2603 pos, NULL_RTX, 1, 1, 0, 0));
2604 return find_split_point (loc, insn);
2605 }
2606 break;
2607
2608 case SIGN_EXTEND:
2609 inner = XEXP (SET_SRC (x), 0);
2610 pos = 0;
2611 len = GET_MODE_BITSIZE (GET_MODE (inner));
2612 unsignedp = 0;
2613 break;
2614
2615 case SIGN_EXTRACT:
2616 case ZERO_EXTRACT:
2617 if (GET_CODE (XEXP (SET_SRC (x), 1)) == CONST_INT
2618 && GET_CODE (XEXP (SET_SRC (x), 2)) == CONST_INT)
2619 {
2620 inner = XEXP (SET_SRC (x), 0);
2621 len = INTVAL (XEXP (SET_SRC (x), 1));
2622 pos = INTVAL (XEXP (SET_SRC (x), 2));
2623
2624 if (BITS_BIG_ENDIAN)
2625 pos = GET_MODE_BITSIZE (GET_MODE (inner)) - len - pos;
2626 unsignedp = (code == ZERO_EXTRACT);
2627 }
2628 break;
2629 }
2630
2631 if (len && pos >= 0 && pos + len <= GET_MODE_BITSIZE (GET_MODE (inner)))
2632 {
2633 enum machine_mode mode = GET_MODE (SET_SRC (x));
2634
2635 /* For unsigned, we have a choice of a shift followed by an
2636 AND or two shifts. Use two shifts for field sizes where the

--- 80 unchanged lines hidden (view full) ---

2717 other operand first. */
2718 if (GET_CODE (XEXP (x, 1)) == NOT)
2719 {
2720 rtx tem = XEXP (x, 0);
2721 SUBST (XEXP (x, 0), XEXP (x, 1));
2722 SUBST (XEXP (x, 1), tem);
2723 }
2724 break;
2725 }
2726
2727 /* Otherwise, select our actions depending on our rtx class. */
2728 switch (GET_RTX_CLASS (code))
2729 {
2730 case 'b': /* This is ZERO_EXTRACT and SIGN_EXTRACT. */
2731 case '3':
2732 split = find_split_point (&XEXP (x, 2), insn);
2733 if (split)
2734 return split;
2735 /* ... fall through ... */
2736 case '2':
2737 case 'c':
2738 case '<':
2739 split = find_split_point (&XEXP (x, 1), insn);
2740 if (split)
2741 return split;
2742 /* ... fall through ... */
2743 case '1':
2744 /* Some machines have (and (shift ...) ...) insns. If X is not
2745 an AND, but XEXP (X, 0) is, use it as our split point. */
2746 if (GET_CODE (x) != AND && GET_CODE (XEXP (x, 0)) == AND)
2747 return &XEXP (x, 0);
2748
2749 split = find_split_point (&XEXP (x, 0), insn);
2750 if (split)

--- 55 unchanged lines hidden (view full) ---

2806 not have been seen as equal above. However, flow.c will make a
2807 LOG_LINKS entry for that case. If we do nothing, we will try to
2808 rerecognize our original insn and, when it succeeds, we will
2809 delete the feeding insn, which is incorrect.
2810
2811 So force this insn not to match in this (rare) case. */
2812 if (! in_dest && code == REG && GET_CODE (from) == REG
2813 && REGNO (x) == REGNO (from))
2814 return gen_rtx (CLOBBER, GET_MODE (x), const0_rtx);
2815
2816 /* If this is an object, we are done unless it is a MEM or LO_SUM, both
2817 of which may contain things that can be combined. */
2818 if (code != MEM && code != LO_SUM && GET_RTX_CLASS (code) == 'o')
2819 return x;
2820
2821 /* It is possible to have a subexpression appear twice in the insn.
2822 Suppose that FROM is a register that appears within TO.
2823 Then, after that subexpression has been scanned once by `subst',
2824 the second time it is scanned, TO may be found. If we were
2825 to scan TO here, we would find FROM within it and create a
2826 self-referent rtl structure which is completely wrong. */
2827 if (COMBINE_RTX_EQUAL_P (x, to))
2828 return to;
2829
2830 len = GET_RTX_LENGTH (code);
2831 fmt = GET_RTX_FORMAT (code);
2832
2833 /* We don't need to process a SET_DEST that is a register, CC0, or PC, so
2834 set up to skip this common case. All other cases where we want to
2835 suppress replacing something inside a SET_SRC are handled via the
2836 IN_DEST operand. */
2837 if (code == SET
2838 && (GET_CODE (SET_DEST (x)) == REG
2839 || GET_CODE (SET_DEST (x)) == CC0
2840 || GET_CODE (SET_DEST (x)) == PC))
2841 fmt = "ie";
2842
2843 /* Get the mode of operand 0 in case X is now a SIGN_EXTEND of a constant. */
2844 if (fmt[0] == 'e')
2845 op0_mode = GET_MODE (XEXP (x, 0));
2846
2847 for (i = 0; i < len; i++)
2848 {
2849 if (fmt[i] == 'E')
2850 {
2851 register int j;
2852 for (j = XVECLEN (x, i) - 1; j >= 0; j--)
2853 {
2854 if (COMBINE_RTX_EQUAL_P (XVECEXP (x, i, j), from))
2855 {
2856 new = (unique_copy && n_occurrences ? copy_rtx (to) : to);
2857 n_occurrences++;
2858 }
2859 else
2860 {
2861 new = subst (XVECEXP (x, i, j), from, to, 0, unique_copy);
2862
2863 /* If this substitution failed, this whole thing fails. */
2864 if (GET_CODE (new) == CLOBBER && XEXP (new, 0) == const0_rtx)
2865 return new;
2866 }
2867
2868 SUBST (XVECEXP (x, i, j), new);
2869 }
2870 }
2871 else if (fmt[i] == 'e')
2872 {
2873 if (COMBINE_RTX_EQUAL_P (XEXP (x, i), from))
2874 {
2875 /* In general, don't install a subreg involving two modes not
2876 tieable. It can worsen register allocation, and can even
2877 make invalid reload insns, since the reg inside may need to
2878 be copied from in the outside mode, and that may be invalid
2879 if it is an fp reg copied in integer mode.
2880
2881 We allow two exceptions to this: It is valid if it is inside
2882 another SUBREG and the mode of that SUBREG and the mode of
2883 the inside of TO is tieable and it is valid if X is a SET
2884 that copies FROM to CC0. */
2885 if (GET_CODE (to) == SUBREG
2886 && ! MODES_TIEABLE_P (GET_MODE (to),
2887 GET_MODE (SUBREG_REG (to)))
2888 && ! (code == SUBREG
2889 && MODES_TIEABLE_P (GET_MODE (x),
2890 GET_MODE (SUBREG_REG (to))))
2891#ifdef HAVE_cc0
2892 && ! (code == SET && i == 1 && XEXP (x, 0) == cc0_rtx)
2893#endif
2894 )
2895 return gen_rtx (CLOBBER, VOIDmode, const0_rtx);
2896
2897 new = (unique_copy && n_occurrences ? copy_rtx (to) : to);
2898 n_occurrences++;
2899 }
2900 else
2901 /* If we are in a SET_DEST, suppress most cases unless we
2902 have gone inside a MEM, in which case we want to
2903 simplify the address. We assume here that things that
2904 are actually part of the destination have their inner
2905 parts in the first expression. This is true for SUBREG,
2906 STRICT_LOW_PART, and ZERO_EXTRACT, which are the only
2907 things aside from REG and MEM that should appear in a
2908 SET_DEST. */
2909 new = subst (XEXP (x, i), from, to,
2910 (((in_dest
2911 && (code == SUBREG || code == STRICT_LOW_PART
2912 || code == ZERO_EXTRACT))
2913 || code == SET)
2914 && i == 0), unique_copy);
2915
2916 /* If we found that we will have to reject this combination,
2917 indicate that by returning the CLOBBER ourselves, rather than
2918 an expression containing it. This will speed things up as
2919 well as prevent accidents where two CLOBBERs are considered
2920 to be equal, thus producing an incorrect simplification. */
2921
2922 if (GET_CODE (new) == CLOBBER && XEXP (new, 0) == const0_rtx)
2923 return new;
2924
2925 SUBST (XEXP (x, i), new);
2926 }
2927 }
2928
2929 /* Try to simplify X. If the simplification changed the code, it is likely
2930 that further simplification will help, so loop, but limit the number
2931 of repetitions that will be performed. */
2932
2933 for (i = 0; i < 4; i++)

--- 113 unchanged lines hidden (view full) ---

3047 && ((GET_RTX_CLASS (GET_CODE (XEXP (x, 0))) != 'o'
3048 && ! (GET_CODE (XEXP (x, 0)) == SUBREG
3049 && (GET_RTX_CLASS (GET_CODE (SUBREG_REG (XEXP (x, 0))))
3050 == 'o'))))))
3051 {
3052 rtx cond, true, false;
3053
3054 cond = if_then_else_cond (x, &true, &false);
3055 if (cond != 0)
3056 {
3057 rtx cop1 = const0_rtx;
3058 enum rtx_code cond_code = simplify_comparison (NE, &cond, &cop1);
3059
3060 if (cond_code == NE && GET_RTX_CLASS (GET_CODE (cond)) == '<')
3061 return x;
3062
3063 /* Simplify the alternative arms; this may collapse the true and

--- 20 unchanged lines hidden (view full) ---

3084 gen_binary (cond_code, mode, cond, cop1));
3085 else if (GET_CODE (false) == CONST_INT
3086 && INTVAL (false) == - STORE_FLAG_VALUE
3087 && true == const0_rtx)
3088 x = gen_unary (NEG, mode, mode,
3089 gen_binary (reverse_condition (cond_code),
3090 mode, cond, cop1));
3091 else
3092 return gen_rtx (IF_THEN_ELSE, mode,
3093 gen_binary (cond_code, VOIDmode, cond, cop1),
3094 true, false);
3095
3096 code = GET_CODE (x);
3097 op0_mode = VOIDmode;
3098 }
3099 }
3100
3101 /* Try to fold this expression in case we have constants that weren't
3102 present before. */

--- 99 unchanged lines hidden (view full) ---

3202 <= GET_MODE_SIZE (GET_MODE (SUBREG_REG (x)))))
3203 {
3204 rtx inner = SUBREG_REG (x);
3205 int endian_offset = 0;
3206 /* Don't change the mode of the MEM
3207 if that would change the meaning of the address. */
3208 if (MEM_VOLATILE_P (SUBREG_REG (x))
3209 || mode_dependent_address_p (XEXP (inner, 0)))
3210 return gen_rtx (CLOBBER, mode, const0_rtx);
3211
3212 if (BYTES_BIG_ENDIAN)
3213 {
3214 if (GET_MODE_SIZE (mode) < UNITS_PER_WORD)
3215 endian_offset += UNITS_PER_WORD - GET_MODE_SIZE (mode);
3216 if (GET_MODE_SIZE (GET_MODE (inner)) < UNITS_PER_WORD)
3217 endian_offset -= (UNITS_PER_WORD
3218 - GET_MODE_SIZE (GET_MODE (inner)));
3219 }
3220 /* Note if the plus_constant doesn't make a valid address
3221 then this combination won't be accepted. */
3222 x = gen_rtx (MEM, mode,
3223 plus_constant (XEXP (inner, 0),
3224 (SUBREG_WORD (x) * UNITS_PER_WORD
3225 + endian_offset)));
3226 MEM_VOLATILE_P (x) = MEM_VOLATILE_P (inner);
3227 RTX_UNCHANGING_P (x) = RTX_UNCHANGING_P (inner);
3228 MEM_IN_STRUCT_P (x) = MEM_IN_STRUCT_P (inner);
3229 return x;
3230 }
3231
3232 /* If we are in a SET_DEST, these other cases can't apply. */
3233 if (in_dest)

--- 25 unchanged lines hidden (view full) ---

3259#endif
3260#if FRAME_POINTER_REGNUM != ARG_POINTER_REGNUM
3261 && REGNO (SUBREG_REG (x)) != ARG_POINTER_REGNUM
3262#endif
3263 && REGNO (SUBREG_REG (x)) != STACK_POINTER_REGNUM)
3264 {
3265 if (HARD_REGNO_MODE_OK (REGNO (SUBREG_REG (x)) + SUBREG_WORD (x),
3266 mode))
3267 return gen_rtx (REG, mode,
3268 REGNO (SUBREG_REG (x)) + SUBREG_WORD (x));
3269 else
3270 return gen_rtx (CLOBBER, mode, const0_rtx);
3271 }
3272
3273 /* For a constant, try to pick up the part we want. Handle a full
3274 word and low-order part. Only do this if we are narrowing
3275 the constant; if it is being widened, we have no idea what
3276 the extra bits will have been set to. */
3277
3278 if (CONSTANT_P (SUBREG_REG (x)) && op0_mode != VOIDmode
3279 && GET_MODE_SIZE (mode) == UNITS_PER_WORD
3280 && GET_MODE_SIZE (op0_mode) < UNITS_PER_WORD
3281 && GET_MODE_CLASS (mode) == MODE_INT)
3282 {
3283 temp = operand_subword (SUBREG_REG (x), SUBREG_WORD (x),
3284 0, op0_mode);
3285 if (temp)
3286 return temp;
3287 }
3288
3289 /* If we want a subreg of a constant, at offset 0,
3290 take the low bits. On a little-endian machine, that's
3291 always valid. On a big-endian machine, it's valid
3292 only if the constant's mode fits in one word. */
3293 if (CONSTANT_P (SUBREG_REG (x)) && subreg_lowpart_p (x)
3294 && GET_MODE_SIZE (mode) < GET_MODE_SIZE (op0_mode)
3295 && (! WORDS_BIG_ENDIAN
3296 || GET_MODE_BITSIZE (op0_mode) <= BITS_PER_WORD))
3297 return gen_lowpart_for_combine (mode, SUBREG_REG (x));
3298
3299 /* A paradoxical SUBREG of a VOIDmode constant is the same constant,
3300 since we are saying that the high bits don't matter. */
3301 if (CONSTANT_P (SUBREG_REG (x)) && GET_MODE (SUBREG_REG (x)) == VOIDmode
3302 && GET_MODE_SIZE (mode) > GET_MODE_SIZE (op0_mode))

--- 25 unchanged lines hidden (view full) ---

3328 return gen_binary (XOR, mode, XEXP (XEXP (x, 0), 0), temp);
3329
3330 /* (not (ashift 1 X)) is (rotate ~1 X). We used to do this for operands
3331 other than 1, but that is not valid. We could do a similar
3332 simplification for (not (lshiftrt C X)) where C is just the sign bit,
3333 but this doesn't seem common enough to bother with. */
3334 if (GET_CODE (XEXP (x, 0)) == ASHIFT
3335 && XEXP (XEXP (x, 0), 0) == const1_rtx)
3336 return gen_rtx (ROTATE, mode, gen_unary (NOT, mode, mode, const1_rtx),
3337 XEXP (XEXP (x, 0), 1));
3338
3339 if (GET_CODE (XEXP (x, 0)) == SUBREG
3340 && subreg_lowpart_p (XEXP (x, 0))
3341 && (GET_MODE_SIZE (GET_MODE (XEXP (x, 0)))
3342 < GET_MODE_SIZE (GET_MODE (SUBREG_REG (XEXP (x, 0)))))
3343 && GET_CODE (SUBREG_REG (XEXP (x, 0))) == ASHIFT
3344 && XEXP (SUBREG_REG (XEXP (x, 0)), 0) == const1_rtx)
3345 {
3346 enum machine_mode inner_mode = GET_MODE (SUBREG_REG (XEXP (x, 0)));
3347
3348 x = gen_rtx (ROTATE, inner_mode,
3349 gen_unary (NOT, inner_mode, inner_mode, const1_rtx),
3350 XEXP (SUBREG_REG (XEXP (x, 0)), 1));
3351 return gen_lowpart_for_combine (mode, x);
3352 }
3353
3354#if STORE_FLAG_VALUE == -1
3355 /* (not (comparison foo bar)) can be done by reversing the comparison
3356 code if valid. */
3357 if (GET_RTX_CLASS (GET_CODE (XEXP (x, 0))) == '<'
3358 && reversible_comparison_p (XEXP (x, 0)))
3359 return gen_rtx_combine (reverse_condition (GET_CODE (XEXP (x, 0))),
3360 mode, XEXP (XEXP (x, 0), 0),
3361 XEXP (XEXP (x, 0), 1));
3362
3363 /* (ashiftrt foo C) where C is the number of bits in FOO minus 1
3364 is (lt foo (const_int 0)), so we can perform the above
3365 simplification. */
3366
3367 if (XEXP (x, 1) == const1_rtx
3368 && GET_CODE (XEXP (x, 0)) == ASHIFTRT
3369 && GET_CODE (XEXP (XEXP (x, 0), 1)) == CONST_INT
3370 && INTVAL (XEXP (XEXP (x, 0), 1)) == GET_MODE_BITSIZE (mode) - 1)
3371 return gen_rtx_combine (GE, mode, XEXP (XEXP (x, 0), 0), const0_rtx);
3372#endif
3373
3374 /* Apply De Morgan's laws to reduce number of patterns for machines
3375 with negating logical insns (and-not, nand, etc.). If result has
3376 only one NOT, put it first, since that is how the patterns are
3377 coded. */
3378
3379 if (GET_CODE (XEXP (x, 0)) == IOR || GET_CODE (XEXP (x, 0)) == AND)
3380 {

--- 31 unchanged lines hidden (view full) ---

3412
3413 /* Similarly, (neg (not X)) is (plus X 1). */
3414 if (GET_CODE (XEXP (x, 0)) == NOT)
3415 return plus_constant (XEXP (XEXP (x, 0), 0), 1);
3416
3417 /* (neg (minus X Y)) can become (minus Y X). */
3418 if (GET_CODE (XEXP (x, 0)) == MINUS
3419 && (! FLOAT_MODE_P (mode)
3420 /* x-y != -(y-x) with IEEE floating point. */
3421 || TARGET_FLOAT_FORMAT != IEEE_FLOAT_FORMAT
3422 || flag_fast_math))
3423 return gen_binary (MINUS, mode, XEXP (XEXP (x, 0), 1),
3424 XEXP (XEXP (x, 0), 0));
3425
3426 /* (neg (xor A 1)) is (plus A -1) if A is known to be either 0 or 1. */
3427 if (GET_CODE (XEXP (x, 0)) == XOR && XEXP (XEXP (x, 0), 1) == const1_rtx
3428 && nonzero_bits (XEXP (XEXP (x, 0), 0), mode) == 1)
3429 return gen_binary (PLUS, mode, XEXP (XEXP (x, 0), 0), constm1_rtx);
3430
3431 /* NEG commutes with ASHIFT since it is multiplication. Only do this
3432 if we can then eliminate the NEG (e.g.,
3433 if the operand is a constant). */
3434

--- 44 unchanged lines hidden (view full) ---

3479 if (GET_CODE (temp1) != ASHIFTRT
3480 || GET_CODE (XEXP (temp1, 0)) != ASHIFT
3481 || XEXP (XEXP (temp1, 0), 0) != temp)
3482 return temp1;
3483 }
3484 break;
3485
3486 case TRUNCATE:
3487 if (GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_WIDE_INT)
3488 SUBST (XEXP (x, 0),
3489 force_to_mode (XEXP (x, 0), GET_MODE (XEXP (x, 0)),
3490 GET_MODE_MASK (mode), NULL_RTX, 0));
3491 break;
3492
3493 case FLOAT_TRUNCATE:
3494 /* (float_truncate:SF (float_extend:DF foo:SF)) = foo:SF. */
3495 if (GET_CODE (XEXP (x, 0)) == FLOAT_EXTEND
3496 && GET_MODE (XEXP (XEXP (x, 0), 0)) == mode)
3497 return XEXP (XEXP (x, 0), 0);
3498

--- 123 unchanged lines hidden (view full) ---

3622
3623 if (GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_WIDE_INT
3624 && (nonzero_bits (XEXP (x, 0), mode)
3625 & nonzero_bits (XEXP (x, 1), mode)) == 0)
3626 return gen_binary (IOR, mode, XEXP (x, 0), XEXP (x, 1));
3627 break;
3628
3629 case MINUS:
3630#if STORE_FLAG_VALUE == 1
3631 /* (minus 1 (comparison foo bar)) can be done by reversing the comparison
3632 code if valid. */
3633 if (XEXP (x, 0) == const1_rtx
3634 && GET_RTX_CLASS (GET_CODE (XEXP (x, 1))) == '<'
3635 && reversible_comparison_p (XEXP (x, 1)))
3636 return gen_binary (reverse_condition (GET_CODE (XEXP (x, 1))),
3637 mode, XEXP (XEXP (x, 1), 0),
3638 XEXP (XEXP (x, 1), 1));
3639#endif
3640
3641 /* (minus <foo> (and <foo> (const_int -pow2))) becomes
3642 (and <foo> (const_int pow2-1)) */
3643 if (GET_CODE (XEXP (x, 1)) == AND
3644 && GET_CODE (XEXP (XEXP (x, 1), 1)) == CONST_INT
3645 && exact_log2 (- INTVAL (XEXP (XEXP (x, 1), 1))) >= 0
3646 && rtx_equal_p (XEXP (XEXP (x, 1), 0), XEXP (x, 0)))
3647 return simplify_and_const_int (NULL_RTX, mode, XEXP (x, 0),

--- 57 unchanged lines hidden (view full) ---

3705 enum rtx_code new_code;
3706
3707 if (GET_CODE (op0) == COMPARE)
3708 op1 = XEXP (op0, 1), op0 = XEXP (op0, 0);
3709
3710 /* Simplify our comparison, if possible. */
3711 new_code = simplify_comparison (code, &op0, &op1);
3712
3713#if STORE_FLAG_VALUE == 1
3714 /* If STORE_FLAG_VALUE is 1, we can convert (ne x 0) to simply X
3715 if only the low-order bit is possibly nonzero in X (such as when
3716 X is a ZERO_EXTRACT of one bit). Similarly, we can convert EQ to
3717 (xor X 1) or (minus 1 X); we use the former. Finally, if X is
3718 known to be either 0 or -1, NE becomes a NEG and EQ becomes
3719 (plus X 1).
3720
3721 Remove any ZERO_EXTRACT we made when thinking this was a
3722 comparison. It may now be simpler to use, e.g., an AND. If a
3723 ZERO_EXTRACT is indeed appropriate, it will be placed back by
3724 the call to make_compound_operation in the SET case. */
3725
3726 if (new_code == NE && GET_MODE_CLASS (mode) == MODE_INT
3727 && op1 == const0_rtx
3728 && nonzero_bits (op0, mode) == 1)
3729 return gen_lowpart_for_combine (mode,
3730 expand_compound_operation (op0));
3731
3732 else if (new_code == NE && GET_MODE_CLASS (mode) == MODE_INT
3733 && op1 == const0_rtx
3734 && (num_sign_bit_copies (op0, mode)
3735 == GET_MODE_BITSIZE (mode)))
3736 {
3737 op0 = expand_compound_operation (op0);
3738 return gen_unary (NEG, mode, mode,
3739 gen_lowpart_for_combine (mode, op0));
3740 }
3741
3742 else if (new_code == EQ && GET_MODE_CLASS (mode) == MODE_INT
3743 && op1 == const0_rtx
3744 && nonzero_bits (op0, mode) == 1)
3745 {
3746 op0 = expand_compound_operation (op0);
3747 return gen_binary (XOR, mode,
3748 gen_lowpart_for_combine (mode, op0),
3749 const1_rtx);
3750 }
3751
3752 else if (new_code == EQ && GET_MODE_CLASS (mode) == MODE_INT
3753 && op1 == const0_rtx
3754 && (num_sign_bit_copies (op0, mode)
3755 == GET_MODE_BITSIZE (mode)))
3756 {
3757 op0 = expand_compound_operation (op0);
3758 return plus_constant (gen_lowpart_for_combine (mode, op0), 1);
3759 }
3760#endif
3761
3762#if STORE_FLAG_VALUE == -1
3763 /* If STORE_FLAG_VALUE is -1, we have cases similar to
3764 those above. */
3765 if (new_code == NE && GET_MODE_CLASS (mode) == MODE_INT
3766 && op1 == const0_rtx
3767 && (num_sign_bit_copies (op0, mode)
3768 == GET_MODE_BITSIZE (mode)))
3769 return gen_lowpart_for_combine (mode,
3770 expand_compound_operation (op0));
3771
3772 else if (new_code == NE && GET_MODE_CLASS (mode) == MODE_INT
3773 && op1 == const0_rtx
3774 && nonzero_bits (op0, mode) == 1)
3775 {
3776 op0 = expand_compound_operation (op0);
3777 return gen_unary (NEG, mode, mode,
3778 gen_lowpart_for_combine (mode, op0));
3779 }
3780
3781 else if (new_code == EQ && GET_MODE_CLASS (mode) == MODE_INT
3782 && op1 == const0_rtx
3783 && (num_sign_bit_copies (op0, mode)
3784 == GET_MODE_BITSIZE (mode)))
3785 {
3786 op0 = expand_compound_operation (op0);
3787 return gen_unary (NOT, mode, mode,
3788 gen_lowpart_for_combine (mode, op0));
3789 }
3790
3791 /* If X is 0/1, (eq X 0) is X-1. */
3792 else if (new_code == EQ && GET_MODE_CLASS (mode) == MODE_INT
3793 && op1 == const0_rtx
3794 && nonzero_bits (op0, mode) == 1)
3795 {
3796 op0 = expand_compound_operation (op0);
3797 return plus_constant (gen_lowpart_for_combine (mode, op0), -1);
3798 }
3799#endif
3800
3801 /* If STORE_FLAG_VALUE says to just test the sign bit and X has just
3802 one bit that might be nonzero, we can convert (ne x 0) to
3803 (ashift x c) where C puts the bit in the sign bit. Remove any
3804 AND with STORE_FLAG_VALUE when we are done, since we are only
3805 going to test the sign bit. */
3806 if (new_code == NE && GET_MODE_CLASS (mode) == MODE_INT
3807 && GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_WIDE_INT
3808 && (STORE_FLAG_VALUE
3809 == (HOST_WIDE_INT) 1 << (GET_MODE_BITSIZE (mode) - 1))
3810 && op1 == const0_rtx
3811 && mode == GET_MODE (op0)
3812 && (i = exact_log2 (nonzero_bits (op0, mode))) >= 0)
3813 {
3814 x = simplify_shift_const (NULL_RTX, ASHIFT, mode,
3815 expand_compound_operation (op0),
3816 GET_MODE_BITSIZE (mode) - 1 - i);

--- 16 unchanged lines hidden (view full) ---

3833
3834 case IF_THEN_ELSE:
3835 return simplify_if_then_else (x);
3836
3837 case ZERO_EXTRACT:
3838 case SIGN_EXTRACT:
3839 case ZERO_EXTEND:
3840 case SIGN_EXTEND:
3841 /* If we are processing SET_DEST, we are done. */
3842 if (in_dest)
3843 return x;
3844
3845 return expand_compound_operation (x);
3846
3847 case SET:
3848 return simplify_set (x);
3849
3850 case AND:
3851 case IOR:
3852 case XOR:
3853 return simplify_logical (x, last);
3854
3855 case ABS:
3856 /* (abs (neg <foo>)) -> (abs <foo>) */
3857 if (GET_CODE (XEXP (x, 0)) == NEG)
3858 SUBST (XEXP (x, 0), XEXP (XEXP (x, 0), 0));
3859
3860 /* If operand is something known to be positive, ignore the ABS. */
3861 if (GET_CODE (XEXP (x, 0)) == FFS || GET_CODE (XEXP (x, 0)) == ABS
3862 || ((GET_MODE_BITSIZE (GET_MODE (XEXP (x, 0)))
3863 <= HOST_BITS_PER_WIDE_INT)
3864 && ((nonzero_bits (XEXP (x, 0), GET_MODE (XEXP (x, 0)))
3865 & ((HOST_WIDE_INT) 1
3866 << (GET_MODE_BITSIZE (GET_MODE (XEXP (x, 0))) - 1)))
3867 == 0)))

--- 35 unchanged lines hidden (view full) ---

3903 force_to_mode (XEXP (x, 1), GET_MODE (x),
3904 ((HOST_WIDE_INT) 1
3905 << exact_log2 (GET_MODE_BITSIZE (GET_MODE (x))))
3906 - 1,
3907 NULL_RTX, 0));
3908#endif
3909
3910 break;
3911 }
3912
3913 return x;
3914}
3915
3916/* Simplify X, an IF_THEN_ELSE expression. Return the new expression. */
3917
3918static rtx

--- 4 unchanged lines hidden (view full) ---

3923 rtx cond = XEXP (x, 0);
3924 rtx true = XEXP (x, 1);
3925 rtx false = XEXP (x, 2);
3926 enum rtx_code true_code = GET_CODE (cond);
3927 int comparison_p = GET_RTX_CLASS (true_code) == '<';
3928 rtx temp;
3929 int i;
3930
3931 /* Simplify storing of the truth value. */
3932 if (comparison_p && true == const_true_rtx && false == const0_rtx)
3933 return gen_binary (true_code, mode, XEXP (cond, 0), XEXP (cond, 1));
3934
3935 /* Also when the truth value has to be reversed. */
3936 if (comparison_p && reversible_comparison_p (cond)
3937 && true == const0_rtx && false == const_true_rtx)
3938 return gen_binary (reverse_condition (true_code),
3939 mode, XEXP (cond, 0), XEXP (cond, 1));
3940
3941 /* Sometimes we can simplify the arm of an IF_THEN_ELSE if a register used
3942 in it is being compared against certain values. Get the true and false
3943 comparisons and see if that says anything about the value of each arm. */

--- 69 unchanged lines hidden (view full) ---

4013 SUBST (XEXP (x, 0),
4014 gen_binary (true_code, GET_MODE (cond), XEXP (cond, 0),
4015 XEXP (cond, 1)));
4016
4017 SUBST (XEXP (x, 1), false);
4018 SUBST (XEXP (x, 2), true);
4019
4020 temp = true, true = false, false = temp, cond = XEXP (x, 0);
4021 }
4022
4023 /* If the two arms are identical, we don't need the comparison. */
4024
4025 if (rtx_equal_p (true, false) && ! side_effects_p (cond))
4026 return true;
4027
4028 /* Look for cases where we have (abs x) or (neg (abs X)). */
4029
4030 if (GET_MODE_CLASS (mode) == MODE_INT
4031 && GET_CODE (false) == NEG
4032 && rtx_equal_p (true, XEXP (false, 0))
4033 && comparison_p
4034 && rtx_equal_p (true, XEXP (cond, 0))
4035 && ! side_effects_p (true))
4036 switch (true_code)
4037 {
4038 case GT:
4039 case GE:
4040 return gen_unary (ABS, mode, mode, true);
4041 case LT:
4042 case LE:
4043 return gen_unary (NEG, mode, mode, gen_unary (ABS, mode, mode, true));
4044 }
4045
4046 /* Look for MIN or MAX. */
4047
4048 if ((! FLOAT_MODE_P (mode) || flag_fast_math)
4049 && comparison_p
4050 && rtx_equal_p (XEXP (cond, 0), true)
4051 && rtx_equal_p (XEXP (cond, 1), false)

--- 7 unchanged lines hidden (view full) ---

4059 case LT:
4060 return gen_binary (SMIN, mode, true, false);
4061 case GEU:
4062 case GTU:
4063 return gen_binary (UMAX, mode, true, false);
4064 case LEU:
4065 case LTU:
4066 return gen_binary (UMIN, mode, true, false);
4067 }
4068
4069#if STORE_FLAG_VALUE == 1 || STORE_FLAG_VALUE == -1
4070
4071 /* If we have (if_then_else COND (OP Z C1) Z) and OP is an identity when its
4072 second operand is zero, this can be done as (OP Z (mult COND C2)) where
4073 C2 = C1 * STORE_FLAG_VALUE. Similarly if OP has an outer ZERO_EXTEND or
4074 SIGN_EXTEND as long as Z is already extended (so we don't destroy it).
4075 We can do this kind of thing in some cases when STORE_FLAG_VALUE is
4076 neither of the above, but it isn't worth checking for. */
4077
4078 if (comparison_p && mode != VOIDmode && ! side_effects_p (x))
4079 {
4080 rtx t = make_compound_operation (true, SET);
4081 rtx f = make_compound_operation (false, SET);
4082 rtx cond_op0 = XEXP (cond, 0);
4083 rtx cond_op1 = XEXP (cond, 1);
4084 enum rtx_code op, extend_op = NIL;
4085 enum machine_mode m = mode;
4086 rtx z = 0, c1;
4087
4088 if ((GET_CODE (t) == PLUS || GET_CODE (t) == MINUS
4089 || GET_CODE (t) == IOR || GET_CODE (t) == XOR
4090 || GET_CODE (t) == ASHIFT
4091 || GET_CODE (t) == LSHIFTRT || GET_CODE (t) == ASHIFTRT)
4092 && rtx_equal_p (XEXP (t, 0), f))
4093 c1 = XEXP (t, 1), op = GET_CODE (t), z = f;
4094
4095 /* If an identity-zero op is commutative, check whether there
4096 would be a match if we swapped the operands. */
4097 else if ((GET_CODE (t) == PLUS || GET_CODE (t) == IOR
4098 || GET_CODE (t) == XOR)
4099 && rtx_equal_p (XEXP (t, 1), f))
4100 c1 = XEXP (t, 0), op = GET_CODE (t), z = f;
4101 else if (GET_CODE (t) == SIGN_EXTEND
4102 && (GET_CODE (XEXP (t, 0)) == PLUS
4103 || GET_CODE (XEXP (t, 0)) == MINUS
4104 || GET_CODE (XEXP (t, 0)) == IOR

--- 74 unchanged lines hidden (view full) ---

4179 temp = gen_binary (op, m, gen_lowpart_for_combine (m, z), temp);
4180
4181 if (extend_op != NIL)
4182 temp = gen_unary (extend_op, mode, m, temp);
4183
4184 return temp;
4185 }
4186 }
4187#endif
4188
4189 /* If we have (if_then_else (ne A 0) C1 0) and either A is known to be 0 or
4190 1 and C1 is a single bit or A is known to be 0 or -1 and C1 is the
4191 negation of a single bit, we can convert this operation to a shift. We
4192 can actually do this more generally, but it doesn't seem worth it. */
4193
4194 if (true_code == NE && XEXP (cond, 1) == const0_rtx
4195 && false == const0_rtx && GET_CODE (true) == CONST_INT

--- 70 unchanged lines hidden (view full) ---

4266 /* If the mode changed, we have to change SET_DEST, the mode in the
4267 compare, and the mode in the place SET_DEST is used. If SET_DEST is
4268 a hard register, just build new versions with the proper mode. If it
4269 is a pseudo, we lose unless it is the only time we set the pseudo, in
4270 which case we can safely change its mode. */
4271 if (compare_mode != GET_MODE (dest))
4272 {
4273 int regno = REGNO (dest);
4274 rtx new_dest = gen_rtx (REG, compare_mode, regno);
4275
4276 if (regno < FIRST_PSEUDO_REGISTER
4277 || (reg_n_sets[regno] == 1 && ! REG_USERVAR_P (dest)))
4278 {
4279 if (regno >= FIRST_PSEUDO_REGISTER)
4280 SUBST (regno_reg_rtx[regno], new_dest);
4281
4282 SUBST (SET_DEST (x), new_dest);
4283 SUBST (XEXP (*cc_use, 0), new_dest);
4284 other_changed = 1;
4285

--- 79 unchanged lines hidden (view full) ---

4365 /* If we have (set x (subreg:m1 (op:m2 ...) 0)) with OP being some operation,
4366 and X being a REG or (subreg (reg)), we may be able to convert this to
4367 (set (subreg:m2 x) (op)).
4368
4369 We can always do this if M1 is narrower than M2 because that means that
4370 we only care about the low bits of the result.
4371
4372 However, on machines without WORD_REGISTER_OPERATIONS defined, we cannot
4373 perform a narrower operation than requested since the high-order bits will
4374 be undefined. On machines where it is defined, this transformation is safe
4375 as long as M1 and M2 have the same number of words. */
4376
4377 if (GET_CODE (src) == SUBREG && subreg_lowpart_p (src)
4378 && GET_RTX_CLASS (GET_CODE (SUBREG_REG (src))) != 'o'
4379 && (((GET_MODE_SIZE (GET_MODE (src)) + (UNITS_PER_WORD - 1))
4380 / UNITS_PER_WORD)
4381 == ((GET_MODE_SIZE (GET_MODE (SUBREG_REG (src)))

--- 20 unchanged lines hidden (view full) ---

4402 SUBST (SET_SRC (x), SUBREG_REG (src));
4403
4404 src = SET_SRC (x), dest = SET_DEST (x);
4405 }
4406
4407#ifdef LOAD_EXTEND_OP
4408 /* If we have (set FOO (subreg:M (mem:N BAR) 0)) with M wider than N, this
4409 would require a paradoxical subreg. Replace the subreg with a
4410 zero_extend to avoid the reload that would otherwise be required. */
4411
4412 if (GET_CODE (src) == SUBREG && subreg_lowpart_p (src)
4413 && LOAD_EXTEND_OP (GET_MODE (SUBREG_REG (src))) != NIL
4414 && SUBREG_WORD (src) == 0
4415 && (GET_MODE_SIZE (GET_MODE (src))
4416 > GET_MODE_SIZE (GET_MODE (SUBREG_REG (src))))
4417 && GET_CODE (SUBREG_REG (src)) == MEM)
4418 {

--- 111 unchanged lines hidden (view full) ---

4530 x = gen_binary (AND, mode, XEXP (XEXP (op0, 0), 0), op1);
4531
4532 if (GET_CODE (op1) == CONST_INT)
4533 {
4534 x = simplify_and_const_int (x, mode, op0, INTVAL (op1));
4535
4536 /* If we have (ior (and (X C1) C2)) and the next restart would be
4537 the last, simplify this by making C1 as small as possible
4538 and then exit. */
4539 if (last
4540 && GET_CODE (x) == IOR && GET_CODE (op0) == AND
4541 && GET_CODE (XEXP (op0, 1)) == CONST_INT
4542 && GET_CODE (op1) == CONST_INT)
4543 return gen_binary (IOR, mode,
4544 gen_binary (AND, mode, XEXP (op0, 0),
4545 GEN_INT (INTVAL (XEXP (op0, 1))
4546 & ~ INTVAL (op1))), op1);

--- 106 unchanged lines hidden (view full) ---

4653
4654 if (((GET_CODE (op0) == ASHIFT && GET_CODE (op1) == LSHIFTRT)
4655 || (GET_CODE (op1) == ASHIFT && GET_CODE (op0) == LSHIFTRT))
4656 && rtx_equal_p (XEXP (op0, 0), XEXP (op1, 0))
4657 && GET_CODE (XEXP (op0, 1)) == CONST_INT
4658 && GET_CODE (XEXP (op1, 1)) == CONST_INT
4659 && (INTVAL (XEXP (op0, 1)) + INTVAL (XEXP (op1, 1))
4660 == GET_MODE_BITSIZE (mode)))
4661 return gen_rtx (ROTATE, mode, XEXP (op0, 0),
4662 (GET_CODE (op0) == ASHIFT
4663 ? XEXP (op0, 1) : XEXP (op1, 1)));
4664
4665 /* If OP0 is (ashiftrt (plus ...) C), it might actually be
4666 a (sign_extend (plus ...)). If so, OP1 is a CONST_INT, and the PLUS
4667 does not affect any of the bits in OP1, it can really be done
4668 as a PLUS and we can associate. We do this by seeing if OP1
4669 can be safely shifted left C bits. */
4670 if (GET_CODE (op1) == CONST_INT && GET_CODE (op0) == ASHIFTRT
4671 && GET_CODE (XEXP (op0, 0)) == PLUS

--- 48 unchanged lines hidden (view full) ---

4720
4721 else if (GET_CODE (op0) == AND
4722 && rtx_equal_p (XEXP (op0, 0), op1)
4723 && ! side_effects_p (op1))
4724 return gen_binary (AND, mode,
4725 gen_unary (NOT, mode, mode, XEXP (op0, 1)),
4726 op1);
4727
4728#if STORE_FLAG_VALUE == 1
4729 /* (xor (comparison foo bar) (const_int 1)) can become the reversed
4730 comparison. */
4731 if (op1 == const1_rtx
4732 && GET_RTX_CLASS (GET_CODE (op0)) == '<'
4733 && reversible_comparison_p (op0))
4734 return gen_rtx_combine (reverse_condition (GET_CODE (op0)),
4735 mode, XEXP (op0, 0), XEXP (op0, 1));
4736
4737 /* (lshiftrt foo C) where C is the number of bits in FOO minus 1
4738 is (lt foo (const_int 0)), so we can perform the above
4739 simplification. */
4740
4741 if (op1 == const1_rtx
4742 && GET_CODE (op0) == LSHIFTRT
4743 && GET_CODE (XEXP (op0, 1)) == CONST_INT
4744 && INTVAL (XEXP (op0, 1)) == GET_MODE_BITSIZE (mode) - 1)
4745 return gen_rtx_combine (GE, mode, XEXP (op0, 0), const0_rtx);
4746#endif
4747
4748 /* (xor (comparison foo bar) (const_int sign-bit))
4749 when STORE_FLAG_VALUE is the sign bit. */
4750 if (GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_WIDE_INT
4751 && (STORE_FLAG_VALUE
4752 == (HOST_WIDE_INT) 1 << (GET_MODE_BITSIZE (mode) - 1))
4753 && op1 == const_true_rtx
4754 && GET_RTX_CLASS (GET_CODE (op0)) == '<'
4755 && reversible_comparison_p (op0))
4756 return gen_rtx_combine (reverse_condition (GET_CODE (op0)),
4757 mode, XEXP (op0, 0), XEXP (op0, 1));
4758 break;
4759 }
4760
4761 return x;
4762}
4763
4764/* We consider ZERO_EXTRACT, SIGN_EXTRACT, and SIGN_EXTEND as "compound
4765 operations" because they can be replaced with two more basic operations.
4766 ZERO_EXTEND is also considered "compound" because it can be replaced with

--- 67 unchanged lines hidden (view full) ---

4834
4835 len = INTVAL (XEXP (x, 1));
4836 pos = INTVAL (XEXP (x, 2));
4837
4838 /* If this goes outside the object being extracted, replace the object
4839 with a (use (mem ...)) construct that only combine understands
4840 and is used only for this purpose. */
4841 if (len + pos > GET_MODE_BITSIZE (GET_MODE (XEXP (x, 0))))
4842 SUBST (XEXP (x, 0), gen_rtx (USE, GET_MODE (x), XEXP (x, 0)));
4843
4844 if (BITS_BIG_ENDIAN)
4845 pos = GET_MODE_BITSIZE (GET_MODE (XEXP (x, 0))) - len - pos;
4846
4847 break;
4848
4849 default:
4850 return x;
4851 }
4852
4853 /* If we reach here, we want to return a pair of shifts. The inner
4854 shift is a left shift of BITSIZE - POS - LEN bits. The outer
4855 shift is a right shift of BITSIZE - LEN bits. It is arithmetic or
4856 logical depending on the value of UNSIGNEDP.
4857
4858 If this was a ZERO_EXTEND or ZERO_EXTRACT, this pair of shifts will be
4859 converted into an AND of a shift.
4860

--- 41 unchanged lines hidden (view full) ---

4902 We half-heartedly support variable positions, but do not at all
4903 support variable lengths. */
4904
4905static rtx
4906expand_field_assignment (x)
4907 rtx x;
4908{
4909 rtx inner;
4910 rtx pos; /* Always counts from low bit. */
4911 int len;
4912 rtx mask;
4913 enum machine_mode compute_mode;
4914
4915 /* Loop until we find something we can't simplify. */
4916 while (1)
4917 {
4918 if (GET_CODE (SET_DEST (x)) == STRICT_LOW_PART
4919 && GET_CODE (XEXP (SET_DEST (x), 0)) == SUBREG)
4920 {
4921 inner = SUBREG_REG (XEXP (SET_DEST (x), 0));
4922 len = GET_MODE_BITSIZE (GET_MODE (XEXP (SET_DEST (x), 0)));
4923 pos = const0_rtx;
4924 }
4925 else if (GET_CODE (SET_DEST (x)) == ZERO_EXTRACT
4926 && GET_CODE (XEXP (SET_DEST (x), 1)) == CONST_INT)
4927 {
4928 inner = XEXP (SET_DEST (x), 0);
4929 len = INTVAL (XEXP (SET_DEST (x), 1));
4930 pos = XEXP (SET_DEST (x), 2);
4931
4932 /* If the position is constant and spans the width of INNER,
4933 surround INNER with a USE to indicate this. */
4934 if (GET_CODE (pos) == CONST_INT
4935 && INTVAL (pos) + len > GET_MODE_BITSIZE (GET_MODE (inner)))
4936 inner = gen_rtx (USE, GET_MODE (SET_DEST (x)), inner);
4937
4938 if (BITS_BIG_ENDIAN)
4939 {
4940 if (GET_CODE (pos) == CONST_INT)
4941 pos = GEN_INT (GET_MODE_BITSIZE (GET_MODE (inner)) - len
4942 - INTVAL (pos));
4943 else if (GET_CODE (pos) == MINUS
4944 && GET_CODE (XEXP (pos, 1)) == CONST_INT

--- 12 unchanged lines hidden (view full) ---

4957 /* A SUBREG between two modes that occupy the same numbers of words
4958 can be done by moving the SUBREG to the source. */
4959 else if (GET_CODE (SET_DEST (x)) == SUBREG
4960 && (((GET_MODE_SIZE (GET_MODE (SET_DEST (x)))
4961 + (UNITS_PER_WORD - 1)) / UNITS_PER_WORD)
4962 == ((GET_MODE_SIZE (GET_MODE (SUBREG_REG (SET_DEST (x))))
4963 + (UNITS_PER_WORD - 1)) / UNITS_PER_WORD)))
4964 {
4965 x = gen_rtx (SET, VOIDmode, SUBREG_REG (SET_DEST (x)),
4966 gen_lowpart_for_combine (GET_MODE (SUBREG_REG (SET_DEST (x))),
4967 SET_SRC (x)));
4968 continue;
4969 }
4970 else
4971 break;
4972
4973 while (GET_CODE (inner) == SUBREG && subreg_lowpart_p (inner))
4974 inner = SUBREG_REG (inner);
4975
4976 compute_mode = GET_MODE (inner);
4977
4978 /* Compute a mask of LEN bits, if we can do this on the host machine. */
4979 if (len < HOST_BITS_PER_WIDE_INT)
4980 mask = GEN_INT (((HOST_WIDE_INT) 1 << len) - 1);
4981 else
4982 break;
4983
4984 /* Now compute the equivalent expression. Make a copy of INNER
4985 for the SET_DEST in case it is a MEM into which we will substitute;
4986 we don't want shared RTL in that case. */
4987 x = gen_rtx (SET, VOIDmode, copy_rtx (inner),
4988 gen_binary (IOR, compute_mode,
4989 gen_binary (AND, compute_mode,
4990 gen_unary (NOT, compute_mode,
4991 compute_mode,
4992 gen_binary (ASHIFT,
4993 compute_mode,
4994 mask, pos)),
4995 inner),
4996 gen_binary (ASHIFT, compute_mode,
4997 gen_binary (AND, compute_mode,
4998 gen_lowpart_for_combine
4999 (compute_mode,
5000 SET_SRC (x)),
5001 mask),
5002 pos)));
5003 }
5004
5005 return x;
5006}
5007
5008/* Return an RTX for a reference to LEN bits of INNER. If POS_RTX is nonzero,
5009 it is an RTX that represents a variable starting position; otherwise,
5010 POS is the (constant) starting bit position (counted from the LSB).

--- 11 unchanged lines hidden (view full) ---

5022 IN_DEST is non-zero if this is a reference in the destination of a
5023 SET. This is used when a ZERO_ or SIGN_EXTRACT isn't needed. If non-zero,
5024 a STRICT_LOW_PART will be used, if zero, ZERO_EXTEND or SIGN_EXTEND will
5025 be used.
5026
5027 IN_COMPARE is non-zero if we are in a COMPARE. This means that a
5028 ZERO_EXTRACT should be built even for bits starting at bit 0.
5029
5030 MODE is the desired mode of the result (if IN_DEST == 0). */
5031
5032static rtx
5033make_extraction (mode, inner, pos, pos_rtx, len,
5034 unsignedp, in_dest, in_compare)
5035 enum machine_mode mode;
5036 rtx inner;
5037 int pos;
5038 rtx pos_rtx;
5039 int len;
5040 int unsignedp;
5041 int in_dest, in_compare;
5042{
5043 /* This mode describes the size of the storage area
5044 to fetch the overall value from. Within that, we
5045 ignore the POS lowest bits, etc. */
5046 enum machine_mode is_mode = GET_MODE (inner);
5047 enum machine_mode inner_mode;
5048 enum machine_mode wanted_mem_mode = byte_mode;
5049 enum machine_mode pos_mode = word_mode;
5050 enum machine_mode extraction_mode = word_mode;
5051 enum machine_mode tmode = mode_for_size (len, MODE_INT, 1);
5052 int spans_byte = 0;
5053 rtx new = 0;
5054 rtx orig_pos_rtx = pos_rtx;
5055 int orig_pos;
5056

--- 30 unchanged lines hidden (view full) ---

5087 boundary and we can change the mode of the memory reference. However,
5088 we cannot directly access the MEM if we have a USE and the underlying
5089 MEM is not TMODE. This combination means that MEM was being used in a
5090 context where bits outside its mode were being referenced; that is only
5091 valid in bit-field insns. */
5092
5093 if (tmode != BLKmode
5094 && ! (spans_byte && inner_mode != tmode)
5095 && ((pos_rtx == 0 && pos == 0 && GET_CODE (inner) != MEM
5096 && (! in_dest
5097 || (GET_CODE (inner) == REG
5098 && (movstrict_optab->handlers[(int) tmode].insn_code
5099 != CODE_FOR_nothing))))
5100 || (GET_CODE (inner) == MEM && pos_rtx == 0
5101 && (pos
5102 % (STRICT_ALIGNMENT ? GET_MODE_ALIGNMENT (tmode)
5103 : BITS_PER_UNIT)) == 0
5104 /* We can't do this if we are widening INNER_MODE (it
5105 may not be aligned, for one thing). */
5106 && GET_MODE_BITSIZE (inner_mode) >= GET_MODE_BITSIZE (tmode)
5107 && (inner_mode == tmode
5108 || (! mode_dependent_address_p (XEXP (inner, 0))
5109 && ! MEM_VOLATILE_P (inner))))))
5110 {
5111 /* If INNER is a MEM, make a new MEM that encompasses just the desired
5112 field. If the original and current mode are the same, we need not
5113 adjust the offset. Otherwise, we do if bytes big endian.
5114
5115 If INNER is not a MEM, get a piece consisting of just the field
5116 of interest (in this case POS must be 0). */
5117
5118 if (GET_CODE (inner) == MEM)
5119 {
5120 int offset;
5121 /* POS counts from lsb, but make OFFSET count in memory order. */
5122 if (BYTES_BIG_ENDIAN)
5123 offset = (GET_MODE_BITSIZE (is_mode) - len - pos) / BITS_PER_UNIT;
5124 else
5125 offset = pos / BITS_PER_UNIT;
5126
5127 new = gen_rtx (MEM, tmode, plus_constant (XEXP (inner, 0), offset));
5128 RTX_UNCHANGING_P (new) = RTX_UNCHANGING_P (inner);
5129 MEM_VOLATILE_P (new) = MEM_VOLATILE_P (inner);
5130 MEM_IN_STRUCT_P (new) = MEM_IN_STRUCT_P (inner);
5131 }
5132 else if (GET_CODE (inner) == REG)
5133 {
5134 /* We can't call gen_lowpart_for_combine here since we always want
5135 a SUBREG and it would sometimes return a new hard register. */
5136 if (tmode != inner_mode)
5137 new = gen_rtx (SUBREG, tmode, inner,
5138 (WORDS_BIG_ENDIAN
5139 && GET_MODE_SIZE (inner_mode) > UNITS_PER_WORD
5140 ? ((GET_MODE_SIZE (inner_mode)
5141 - GET_MODE_SIZE (tmode))
5142 / UNITS_PER_WORD)
5143 : 0));
5144 else
5145 new = inner;
5146 }
5147 else
5148 new = force_to_mode (inner, tmode,
5149 len >= HOST_BITS_PER_WIDE_INT
5150 ? GET_MODE_MASK (tmode)
5151 : ((HOST_WIDE_INT) 1 << len) - 1,
5152 NULL_RTX, 0);
5153
5154 /* If this extraction is going into the destination of a SET,
5155 make a STRICT_LOW_PART unless we made a MEM. */
5156
5157 if (in_dest)
5158 return (GET_CODE (new) == MEM ? new
5159 : (GET_CODE (new) != SUBREG
5160 ? gen_rtx (CLOBBER, tmode, const0_rtx)
5161 : gen_rtx_combine (STRICT_LOW_PART, VOIDmode, new)));
5162
5163 /* Otherwise, sign- or zero-extend unless we already are in the
5164 proper mode. */
5165
5166 return (mode == tmode ? new
5167 : gen_rtx_combine (unsignedp ? ZERO_EXTEND : SIGN_EXTEND,
5168 mode, new));

--- 11 unchanged lines hidden (view full) ---

5180 is not 1. In all other cases, we would only be going outside
5181 our object in cases when an original shift would have been
5182 undefined. */
5183 if (! spans_byte
5184 && ((pos_rtx == 0 && pos + len > GET_MODE_BITSIZE (is_mode))
5185 || (pos_rtx != 0 && len != 1)))
5186 return 0;
5187
5188 /* Get the mode to use should INNER be a MEM, the mode for the position,
5189 and the mode for the result. */
5190#ifdef HAVE_insv
5191 if (in_dest)
5192 {
5193 wanted_mem_mode = insn_operand_mode[(int) CODE_FOR_insv][0];
5194 pos_mode = insn_operand_mode[(int) CODE_FOR_insv][2];
5195 extraction_mode = insn_operand_mode[(int) CODE_FOR_insv][3];
5196 }
5197#endif
5198
5199#ifdef HAVE_extzv
5200 if (! in_dest && unsignedp)
5201 {
5202 wanted_mem_mode = insn_operand_mode[(int) CODE_FOR_extzv][1];
5203 pos_mode = insn_operand_mode[(int) CODE_FOR_extzv][3];
5204 extraction_mode = insn_operand_mode[(int) CODE_FOR_extzv][0];
5205 }
5206#endif
5207
5208#ifdef HAVE_extv
5209 if (! in_dest && ! unsignedp)
5210 {
5211 wanted_mem_mode = insn_operand_mode[(int) CODE_FOR_extv][1];
5212 pos_mode = insn_operand_mode[(int) CODE_FOR_extv][3];
5213 extraction_mode = insn_operand_mode[(int) CODE_FOR_extv][0];
5214 }
5215#endif
5216
5217 /* Never narrow an object, since that might not be safe. */
5218
5219 if (mode != VOIDmode
5220 && GET_MODE_SIZE (extraction_mode) < GET_MODE_SIZE (mode))
5221 extraction_mode = mode;
5222
5223 if (pos_rtx && GET_MODE (pos_rtx) != VOIDmode
5224 && GET_MODE_SIZE (pos_mode) < GET_MODE_SIZE (GET_MODE (pos_rtx)))
5225 pos_mode = GET_MODE (pos_rtx);
5226
5227 /* If this is not from memory or we have to change the mode of memory and
5228 cannot, the desired mode is EXTRACTION_MODE. */
5229 if (GET_CODE (inner) != MEM
5230 || (inner_mode != wanted_mem_mode
5231 && (mode_dependent_address_p (XEXP (inner, 0))
5232 || MEM_VOLATILE_P (inner))))
5233 wanted_mem_mode = extraction_mode;
5234
5235 orig_pos = pos;
5236
5237 if (BITS_BIG_ENDIAN)
5238 {
5239 /* If position is constant, compute new position. Otherwise,
5240 build subtraction. */
5241 if (pos_rtx == 0)
5242 pos = (MAX (GET_MODE_BITSIZE (is_mode),
5243 GET_MODE_BITSIZE (wanted_mem_mode))
5244 - len - pos);
5245 else
5246 pos_rtx
5247 = gen_rtx_combine (MINUS, GET_MODE (pos_rtx),
5248 GEN_INT (MAX (GET_MODE_BITSIZE (is_mode),
5249 GET_MODE_BITSIZE (wanted_mem_mode))
5250 - len),
5251 pos_rtx);
5252 }
5253
5254 /* If INNER has a wider mode, make it smaller. If this is a constant
5255 extract, try to adjust the byte to point to the byte containing
5256 the value. */
5257 if (wanted_mem_mode != VOIDmode
5258 && GET_MODE_SIZE (wanted_mem_mode) < GET_MODE_SIZE (is_mode)
5259 && ((GET_CODE (inner) == MEM
5260 && (inner_mode == wanted_mem_mode
5261 || (! mode_dependent_address_p (XEXP (inner, 0))
5262 && ! MEM_VOLATILE_P (inner))))))
5263 {
5264 int offset = 0;
5265
5266 /* The computations below will be correct if the machine is big
5267 endian in both bits and bytes or little endian in bits and bytes.
5268 If it is mixed, we must adjust. */
5269
5270 /* If bytes are big endian and we had a paradoxical SUBREG, we must
5271 adjust OFFSET to compensate. */
5272 if (BYTES_BIG_ENDIAN
5273 && ! spans_byte
5274 && GET_MODE_SIZE (inner_mode) < GET_MODE_SIZE (is_mode))
5275 offset -= GET_MODE_SIZE (is_mode) - GET_MODE_SIZE (inner_mode);
5276
5277 /* If this is a constant position, we can move to the desired byte. */
5278 if (pos_rtx == 0)
5279 {
5280 offset += pos / BITS_PER_UNIT;
5281 pos %= GET_MODE_BITSIZE (wanted_mem_mode);
5282 }
5283
5284 if (BYTES_BIG_ENDIAN != BITS_BIG_ENDIAN
5285 && ! spans_byte
5286 && is_mode != wanted_mem_mode)
5287 offset = (GET_MODE_SIZE (is_mode)
5288 - GET_MODE_SIZE (wanted_mem_mode) - offset);
5289
5290 if (offset != 0 || inner_mode != wanted_mem_mode)
5291 {
5292 rtx newmem = gen_rtx (MEM, wanted_mem_mode,
5293 plus_constant (XEXP (inner, 0), offset));
5294 RTX_UNCHANGING_P (newmem) = RTX_UNCHANGING_P (inner);
5295 MEM_VOLATILE_P (newmem) = MEM_VOLATILE_P (inner);
5296 MEM_IN_STRUCT_P (newmem) = MEM_IN_STRUCT_P (inner);
5297 inner = newmem;
5298 }
5299 }
5300
5301 /* If INNER is not memory, we can always get it into the proper mode. */
5302 else if (GET_CODE (inner) != MEM)
5303 inner = force_to_mode (inner, extraction_mode,
5304 pos_rtx || len + orig_pos >= HOST_BITS_PER_WIDE_INT
5305 ? GET_MODE_MASK (extraction_mode)
5306 : (((HOST_WIDE_INT) 1 << len) - 1) << orig_pos,
5307 NULL_RTX, 0);
5308
5309 /* Adjust mode of POS_RTX, if needed. If we want a wider mode, we
5310 have to zero extend. Otherwise, we can just use a SUBREG. */
5311 if (pos_rtx != 0
5312 && GET_MODE_SIZE (pos_mode) > GET_MODE_SIZE (GET_MODE (pos_rtx)))
5313 pos_rtx = gen_rtx_combine (ZERO_EXTEND, pos_mode, pos_rtx);
5314 else if (pos_rtx != 0
5315 && GET_MODE_SIZE (pos_mode) < GET_MODE_SIZE (GET_MODE (pos_rtx)))
5316 pos_rtx = gen_lowpart_for_combine (pos_mode, pos_rtx);
5317
5318 /* Make POS_RTX unless we already have it and it is correct. If we don't
5319 have a POS_RTX but we do have an ORIG_POS_RTX, the latter must
5320 be a CONST_INT. */
5321 if (pos_rtx == 0 && orig_pos_rtx != 0 && INTVAL (orig_pos_rtx) == pos)
5322 pos_rtx = orig_pos_rtx;
5323
5324 else if (pos_rtx == 0)
5325 pos_rtx = GEN_INT (pos);
5326
5327 /* Make the required operation. See if we can use existing rtx. */
5328 new = gen_rtx_combine (unsignedp ? ZERO_EXTRACT : SIGN_EXTRACT,

--- 33 unchanged lines hidden (view full) ---

5362 return gen_unary (code, mode, mode, tem);
5363
5364 break;
5365
5366 case PLUS: case IOR: case XOR: case AND:
5367 /* If we can safely shift this constant and we find the inner shift,
5368 make a new operation. */
5369 if (GET_CODE (XEXP (x,1)) == CONST_INT
5370 && (INTVAL (XEXP (x, 1)) & (((HOST_WIDE_INT) 1 << count)) - 1) == 0
5371 && (tem = extract_left_shift (XEXP (x, 0), count)) != 0)
5372 return gen_binary (code, mode, tem,
5373 GEN_INT (INTVAL (XEXP (x, 1)) >> count));
5374
5375 break;
5376 }
5377
5378 return 0;
5379}
5380
5381/* Look at the expression rooted at X. Look for expressions
5382 equivalent to ZERO_EXTRACT, SIGN_EXTRACT, ZERO_EXTEND, SIGN_EXTEND.
5383 Form these expressions.

--- 87 unchanged lines hidden (view full) ---

5471 else if ((GET_CODE (XEXP (x, 0)) == XOR
5472 || GET_CODE (XEXP (x, 0)) == IOR)
5473 && GET_CODE (XEXP (XEXP (x, 0), 0)) == LSHIFTRT
5474 && GET_CODE (XEXP (XEXP (x, 0), 1)) == LSHIFTRT
5475 && (i = exact_log2 (INTVAL (XEXP (x, 1)) + 1)) >= 0)
5476 {
5477 /* Apply the distributive law, and then try to make extractions. */
5478 new = gen_rtx_combine (GET_CODE (XEXP (x, 0)), mode,
5479 gen_rtx (AND, mode, XEXP (XEXP (x, 0), 0),
5480 XEXP (x, 1)),
5481 gen_rtx (AND, mode, XEXP (XEXP (x, 0), 1),
5482 XEXP (x, 1)));
5483 new = make_compound_operation (new, in_code);
5484 }
5485
5486 /* If we have (and (rotate X C) M) and C is larger than the number
5487 of bits in M, this is an extraction. */
5488
5489 else if (GET_CODE (XEXP (x, 0)) == ROTATE
5490 && GET_CODE (XEXP (XEXP (x, 0), 1)) == CONST_INT

--- 61 unchanged lines hidden (view full) ---

5552 {
5553 new = gen_rtx_combine (ASHIFTRT, mode,
5554 make_compound_operation (XEXP (x, 0),
5555 next_code),
5556 XEXP (x, 1));
5557 break;
5558 }
5559
5560 /* ... fall through ... */
5561
5562 case ASHIFTRT:
5563 lhs = XEXP (x, 0);
5564 rhs = XEXP (x, 1);
5565
5566 /* If we have (ashiftrt (ashift foo C1) C2) with C2 >= C1,
5567 this is a SIGN_EXTRACT. */
5568 if (GET_CODE (rhs) == CONST_INT

--- 40 unchanged lines hidden (view full) ---

5609
5610 /* If we have something other than a SUBREG, we might have
5611 done an expansion, so rerun ourselves. */
5612 if (GET_CODE (newer) != SUBREG)
5613 newer = make_compound_operation (newer, in_code);
5614
5615 return newer;
5616 }
5617 }
5618
5619 if (new)
5620 {
5621 x = gen_lowpart_for_combine (mode, new);
5622 code = GET_CODE (x);
5623 }
5624

--- 62 unchanged lines hidden (view full) ---

5687 int just_select;
5688{
5689 enum rtx_code code = GET_CODE (x);
5690 int next_select = just_select || code == XOR || code == NOT || code == NEG;
5691 enum machine_mode op_mode;
5692 unsigned HOST_WIDE_INT fuller_mask, nonzero;
5693 rtx op0, op1, temp;
5694
5695 /* If this is a CALL, don't do anything. Some of the code below
5696 will do the wrong thing since the mode of a CALL is VOIDmode. */
5697 if (code == CALL)
5698 return x;
5699
5700 /* We want to perform the operation in its present mode unless we know
5701 that the operation is valid in MODE, in which case we do the operation
5702 in MODE. */
5703 op_mode = ((GET_MODE_CLASS (mode) == GET_MODE_CLASS (GET_MODE (x))
5704 && code_to_optab[(int) code] != 0
5705 && (code_to_optab[(int) code]->handlers[(int) mode].insn_code

--- 53 unchanged lines hidden (view full) ---

5759 MASK are already known to be zero in X, we need not do anything. */
5760 if (GET_MODE (x) == mode && code != SUBREG && (~ mask & nonzero) == 0)
5761 return x;
5762
5763 switch (code)
5764 {
5765 case CLOBBER:
5766 /* If X is a (clobber (const_int)), return it since we know we are
5767 generating something that won't match. */
5768 return x;
5769
5770 case USE:
5771 /* X is a (use (mem ..)) that was made from a bit-field extraction that
5772 spanned the boundary of the MEM. If we are now masking so it is
5773 within that boundary, we don't need the USE any more. */
5774 if (! BITS_BIG_ENDIAN
5775 && (mask & ~ GET_MODE_MASK (GET_MODE (XEXP (x, 0)))) == 0)

--- 89 unchanged lines hidden (view full) ---

5865 /* If MODE is narrower than HOST_WIDE_INT and mask is a negative
5866 number, sign extend it. */
5867
5868 if (width < HOST_BITS_PER_WIDE_INT
5869 && (smask & ((HOST_WIDE_INT) 1 << (width - 1))) != 0)
5870 smask |= (HOST_WIDE_INT) -1 << width;
5871
5872 if (GET_CODE (XEXP (x, 1)) == CONST_INT
5873 && exact_log2 (- smask) >= 0
5874 && (nonzero_bits (XEXP (x, 0), mode) & ~ mask) == 0
5875 && (INTVAL (XEXP (x, 1)) & ~ mask) != 0)
5876 return force_to_mode (plus_constant (XEXP (x, 0),
5877 INTVAL (XEXP (x, 1)) & mask),
5878 mode, mask, reg, next_select);
5879 }
5880
5881 /* ... fall through ... */
5882
5883 case MINUS:
5884 case MULT:
5885 /* For PLUS, MINUS and MULT, we need any bits less significant than the
5886 most significant bit in MASK since carries from those bits will
5887 affect the bits we are interested in. */
5888 mask = fuller_mask;
5889 goto binop;

--- 9 unchanged lines hidden (view full) ---

5899 && GET_CODE (XEXP (XEXP (x, 0), 1)) == CONST_INT
5900 && INTVAL (XEXP (XEXP (x, 0), 1)) >= 0
5901 && INTVAL (XEXP (XEXP (x, 0), 1)) < HOST_BITS_PER_WIDE_INT
5902 && GET_CODE (XEXP (x, 1)) == CONST_INT
5903 && ((INTVAL (XEXP (XEXP (x, 0), 1))
5904 + floor_log2 (INTVAL (XEXP (x, 1))))
5905 < GET_MODE_BITSIZE (GET_MODE (x)))
5906 && (INTVAL (XEXP (x, 1))
5907 & ~ nonzero_bits (XEXP (x, 0), GET_MODE (x)) == 0))
5908 {
5909 temp = GEN_INT ((INTVAL (XEXP (x, 1)) & mask)
5910 << INTVAL (XEXP (XEXP (x, 0), 1)));
5911 temp = gen_binary (GET_CODE (x), GET_MODE (x),
5912 XEXP (XEXP (x, 0), 0), temp);
5913 x = gen_binary (LSHIFTRT, GET_MODE (x), temp, XEXP (x, 1));
5914 return force_to_mode (x, mode, mask, reg, next_select);
5915 }
5916
5917 binop:
5918 /* For most binary operations, just propagate into the operation and
5919 change the mode if we have an operation of that mode. */
5920
5921 op0 = gen_lowpart_for_combine (op_mode,

--- 114 unchanged lines hidden (view full) ---

6036 int i = -1;
6037
6038 /* If the considered data is wider than HOST_WIDE_INT, we can't
6039 represent a mask for all its bits in a single scalar.
6040 But we only care about the lower bits, so calculate these. */
6041
6042 if (GET_MODE_BITSIZE (GET_MODE (x)) > HOST_BITS_PER_WIDE_INT)
6043 {
6044 nonzero = ~(HOST_WIDE_INT)0;
6045
6046 /* GET_MODE_BITSIZE (GET_MODE (x)) - INTVAL (XEXP (x, 1))
6047 is the number of bits a full-width mask would have set.
6048 We need only shift if these are fewer than nonzero can
6049 hold. If not, we must keep all bits set in nonzero. */
6050
6051 if (GET_MODE_BITSIZE (GET_MODE (x)) - INTVAL (XEXP (x, 1))
6052 < HOST_BITS_PER_WIDE_INT)

--- 87 unchanged lines hidden (view full) ---

6140 {
6141 temp = GEN_INT (mask << INTVAL (XEXP (XEXP (x, 0), 1)));
6142 temp = gen_binary (XOR, GET_MODE (x), XEXP (XEXP (x, 0), 0), temp);
6143 x = gen_binary (LSHIFTRT, GET_MODE (x), temp, XEXP (XEXP (x, 0), 1));
6144
6145 return force_to_mode (x, mode, mask, reg, next_select);
6146 }
6147
6148 unop:
6149 op0 = gen_lowpart_for_combine (op_mode,
6150 force_to_mode (XEXP (x, 0), mode, mask,
6151 reg, next_select));
6152 if (op_mode != GET_MODE (x) || op0 != XEXP (x, 0))
6153 x = gen_unary (code, op_mode, op_mode, op0);
6154 break;
6155
6156 case NE:
6157 /* (and (ne FOO 0) CONST) can be (and FOO CONST) if CONST is included
6158 in STORE_FLAG_VALUE and FOO has no bits that might be nonzero not
6159 in CONST. */
/* NOTE(review): the comment describes (ne FOO 0), i.e. a zero SECOND
   operand, yet the test below checks the FIRST operand,
   XEXP (x, 0) == const0_rtx, and then both inspects and returns that
   same operand (trivial if it really is const0_rtx).  Mainline
   combine.c tests XEXP (x, 1) == const0_rtx here -- confirm which is
   intended.  */
6160 if ((mask & ~ STORE_FLAG_VALUE) == 0 && XEXP (x, 0) == const0_rtx
6161 && (nonzero_bits (XEXP (x, 0), mode) & ~ mask) == 0)
6162 return force_to_mode (XEXP (x, 0), mode, mask, reg, next_select);
6163
6164 break;
6165
6166 case IF_THEN_ELSE:
6167 /* We have no way of knowing if the IF_THEN_ELSE can itself be
6168 written in a narrower mode. We play it safe and do not do so. */
6169
6170 SUBST (XEXP (x, 1),
6171 gen_lowpart_for_combine (GET_MODE (x),
6172 force_to_mode (XEXP (x, 1), mode,
6173 mask, reg, next_select)));
6174 SUBST (XEXP (x, 2),
6175 gen_lowpart_for_combine (GET_MODE (x),
6176 force_to_mode (XEXP (x, 2), mode,
6177 mask, reg,next_select)));
6178 break;
6179 }
6180
6181 /* Ensure we return a value of the proper mode. */
6182 return gen_lowpart_for_combine (mode, x);
6183}
6184
6185/* Return nonzero if X is an expression that has one of two values depending on
6186 whether some other value is zero or nonzero. In that case, we return the

--- 35 unchanged lines hidden (view full) ---

6222 || GET_RTX_CLASS (code) == '<')
6223 {
6224 cond0 = if_then_else_cond (XEXP (x, 0), &true0, &false0);
6225 cond1 = if_then_else_cond (XEXP (x, 1), &true1, &false1);
6226
6227 if ((cond0 != 0 || cond1 != 0)
6228 && ! (cond0 != 0 && cond1 != 0 && ! rtx_equal_p (cond0, cond1)))
6229 {
6230 *ptrue = gen_binary (code, mode, true0, true1);
6231 *pfalse = gen_binary (code, mode, false0, false1);
6232 return cond0 ? cond0 : cond1;
6233 }
6234
6235#if STORE_FLAG_VALUE == 1 || STORE_FLAG_VALUE == -1
6236
6237 /* See if we have PLUS, IOR, XOR, MINUS or UMAX, where one of the
6238 operands is zero when the other is non-zero, and vice-versa. */
6239
6240 if ((code == PLUS || code == IOR || code == XOR || code == MINUS
6241 || code == UMAX)
6242 && GET_CODE (XEXP (x, 0)) == MULT && GET_CODE (XEXP (x, 1)) == MULT)
6243 {
6244 rtx op0 = XEXP (XEXP (x, 0), 1);
6245 rtx op1 = XEXP (XEXP (x, 1), 1);
6246
6247 cond0 = XEXP (XEXP (x, 0), 0);
6248 cond1 = XEXP (XEXP (x, 1), 0);

--- 16 unchanged lines hidden (view full) ---

6265 ? gen_unary (NEG, mode, mode, op1) : op1),
6266 const_true_rtx);
6267 return cond0;
6268 }
6269 }
6270
6271 /* Similarly for MULT, AND and UMIN, except that for these the result
6272 is always zero. */
6273 if ((code == MULT || code == AND || code == UMIN)
6274 && GET_CODE (XEXP (x, 0)) == MULT && GET_CODE (XEXP (x, 1)) == MULT)
6275 {
6276 cond0 = XEXP (XEXP (x, 0), 0);
6277 cond1 = XEXP (XEXP (x, 1), 0);
6278
6279 if (GET_RTX_CLASS (GET_CODE (cond0)) == '<'
6280 && GET_RTX_CLASS (GET_CODE (cond1)) == '<'
6281 && reversible_comparison_p (cond1)

--- 5 unchanged lines hidden (view full) ---

6287 && rtx_equal_p (XEXP (cond0, 0), XEXP (cond1, 1))
6288 && rtx_equal_p (XEXP (cond0, 1), XEXP (cond1, 0))))
6289 && ! side_effects_p (x))
6290 {
6291 *ptrue = *pfalse = const0_rtx;
6292 return cond0;
6293 }
6294 }
6295#endif
6296 }
6297
6298 else if (code == IF_THEN_ELSE)
6299 {
6300 /* If we have IF_THEN_ELSE already, extract the condition and
6301 canonicalize it if it is NE or EQ. */
6302 cond0 = XEXP (x, 0);
6303 *ptrue = XEXP (x, 1), *pfalse = XEXP (x, 2);

--- 81 unchanged lines hidden (view full) ---

6385 if (code == ABS && rtx_equal_p (XEXP (x, 0), reg) && val == const0_rtx)
6386 switch (cond)
6387 {
6388 case GE: case GT: case EQ:
6389 return XEXP (x, 0);
6390 case LT: case LE:
6391 return gen_unary (NEG, GET_MODE (XEXP (x, 0)), GET_MODE (XEXP (x, 0)),
6392 XEXP (x, 0));
6393 }
6394
6395 /* The only other cases we handle are MIN, MAX, and comparisons if the
6396 operands are the same as REG and VAL. */
6397
6398 else if (GET_RTX_CLASS (code) == '<' || GET_RTX_CLASS (code) == 'c')
6399 {
6400 if (rtx_equal_p (XEXP (x, 0), val))

--- 20 unchanged lines hidden (view full) ---

6421 case GE: case GT:
6422 return unsignedp ? x : XEXP (x, 1);
6423 case LE: case LT:
6424 return unsignedp ? x : XEXP (x, 0);
6425 case GEU: case GTU:
6426 return unsignedp ? XEXP (x, 1) : x;
6427 case LEU: case LTU:
6428 return unsignedp ? XEXP (x, 0) : x;
6429 }
6430 }
6431 }
6432 }
6433
6434 fmt = GET_RTX_FORMAT (code);
6435 for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
6436 {
6437 if (fmt[i] == 'e')
6438 SUBST (XEXP (x, i), known_cond (XEXP (x, i), cond, reg, val));
6439 else if (fmt[i] == 'E')
6440 for (j = XVECLEN (x, i) - 1; j >= 0; j--)
6441 SUBST (XVECEXP (x, i, j), known_cond (XVECEXP (x, i, j),
6442 cond, reg, val));
6443 }
6444
6445 return x;
6446}
6447
6448/* See if X, a SET operation, can be rewritten as a bit-field assignment.
6449 Return that assignment if so.
6450
6451 We only handle the most common cases. */
6452
6453static rtx
6454make_field_assignment (x)
6455 rtx x;
6456{
6457 rtx dest = SET_DEST (x);
6458 rtx src = SET_SRC (x);
6459 rtx assign;
6460 HOST_WIDE_INT c1;
6461 int pos, len;
6462 rtx other;
6463 enum machine_mode mode;
6464
6465 /* If SRC was (and (not (ashift (const_int 1) POS)) DEST), this is
6466 a clear of a one-bit field. We will have changed it to
6467 (and (rotate (const_int -2) POS) DEST), so check for that. Also check
6468 for a SUBREG. */
6469
6470 if (GET_CODE (src) == AND && GET_CODE (XEXP (src, 0)) == ROTATE
6471 && GET_CODE (XEXP (XEXP (src, 0), 0)) == CONST_INT
6472 && INTVAL (XEXP (XEXP (src, 0), 0)) == -2
6473 && (rtx_equal_p (dest, XEXP (src, 1))
6474 || rtx_equal_p (dest, get_last_value (XEXP (src, 1)))
6475 || rtx_equal_p (get_last_value (dest), XEXP (src, 1))))
6476 {
6477 assign = make_extraction (VOIDmode, dest, 0, XEXP (XEXP (src, 0), 1),
6478 1, 1, 1, 0);
6479 return gen_rtx (SET, VOIDmode, assign, const0_rtx);
6480 }
6481
6482 else if (GET_CODE (src) == AND && GET_CODE (XEXP (src, 0)) == SUBREG
6483 && subreg_lowpart_p (XEXP (src, 0))
6484 && (GET_MODE_SIZE (GET_MODE (XEXP (src, 0)))
6485 < GET_MODE_SIZE (GET_MODE (SUBREG_REG (XEXP (src, 0)))))
6486 && GET_CODE (SUBREG_REG (XEXP (src, 0))) == ROTATE
6487 && INTVAL (XEXP (SUBREG_REG (XEXP (src, 0)), 0)) == -2
6488 && (rtx_equal_p (dest, XEXP (src, 1))
6489 || rtx_equal_p (dest, get_last_value (XEXP (src, 1)))
6490 || rtx_equal_p (get_last_value (dest), XEXP (src, 1))))
6491 {
6492 assign = make_extraction (VOIDmode, dest, 0,
6493 XEXP (SUBREG_REG (XEXP (src, 0)), 1),
6494 1, 1, 1, 0);
6495 return gen_rtx (SET, VOIDmode, assign, const0_rtx);
6496 }
6497
6498 /* If SRC is (ior (ashift (const_int 1) POS) DEST), this is a set of a
6499 one-bit field. */
6500 else if (GET_CODE (src) == IOR && GET_CODE (XEXP (src, 0)) == ASHIFT
6501 && XEXP (XEXP (src, 0), 0) == const1_rtx
6502 && (rtx_equal_p (dest, XEXP (src, 1))
6503 || rtx_equal_p (dest, get_last_value (XEXP (src, 1)))
6504 || rtx_equal_p (get_last_value (dest), XEXP (src, 1))))
6505 {
6506 assign = make_extraction (VOIDmode, dest, 0, XEXP (XEXP (src, 0), 1),
6507 1, 1, 1, 0);
6508 return gen_rtx (SET, VOIDmode, assign, const1_rtx);
6509 }
6510
6511 /* The other case we handle is assignments into a constant-position
6512 field. They look like (ior (and DEST C1) OTHER). If C1 represents
6513 a mask that has all one bits except for a group of zero bits and
6514 OTHER is known to have zeros where C1 has ones, this is such an
6515 assignment. Compute the position and length from C1. Shift OTHER
6516 to the appropriate position, force it to the required mode, and
6517 make the extraction. Check for the AND in both operands. */
6518
6519 if (GET_CODE (src) == IOR && GET_CODE (XEXP (src, 0)) == AND
6520 && GET_CODE (XEXP (XEXP (src, 0), 1)) == CONST_INT
6521 && (rtx_equal_p (XEXP (XEXP (src, 0), 0), dest)
6522 || rtx_equal_p (XEXP (XEXP (src, 0), 0), get_last_value (dest))
/* NOTE(review): this arm takes get_last_value of XEXP (XEXP (src, 0), 1),
   which the test two lines above requires to be a CONST_INT; the
   parallel arm below uses operand 0 of the AND.  Mainline combine.c
   uses XEXP (XEXP (src, 0), 0) here -- confirm.  */
6523 || rtx_equal_p (get_last_value (XEXP (XEXP (src, 0), 1)), dest)))
6524 c1 = INTVAL (XEXP (XEXP (src, 0), 1)), other = XEXP (src, 1);
6525 else if (GET_CODE (src) == IOR && GET_CODE (XEXP (src, 1)) == AND
6526 && GET_CODE (XEXP (XEXP (src, 1), 1)) == CONST_INT
6527 && (rtx_equal_p (XEXP (XEXP (src, 1), 0), dest)
6528 || rtx_equal_p (XEXP (XEXP (src, 1), 0), get_last_value (dest))
6529 || rtx_equal_p (get_last_value (XEXP (XEXP (src, 1), 0)),
6530 dest)))
6531 c1 = INTVAL (XEXP (XEXP (src, 1), 1)), other = XEXP (src, 0);
6532 else
6533 return x;
6534
6535 pos = get_pos_from_mask (c1 ^ GET_MODE_MASK (GET_MODE (dest)), &len);
6536 if (pos < 0 || pos + len > GET_MODE_BITSIZE (GET_MODE (dest))
6537 || (GET_MODE_BITSIZE (GET_MODE (other)) <= HOST_BITS_PER_WIDE_INT
6538 && (c1 & nonzero_bits (other, GET_MODE (other))) != 0))
6539 return x;
6540
6541 assign = make_extraction (VOIDmode, dest, pos, NULL_RTX, len, 1, 1, 0);
6542
6543 /* The mode to use for the source is the mode of the assignment, or of
6544 what is inside a possible STRICT_LOW_PART. */
6545 mode = (GET_CODE (assign) == STRICT_LOW_PART
6546 ? GET_MODE (XEXP (assign, 0)) : GET_MODE (assign));
6547
6548 /* Shift OTHER right POS places and make it the source, restricting it
6549 to the proper length and mode. */

--- 29 unchanged lines hidden (view full) ---

6579
6580 /* The outer operation can only be one of the following: */
6581 if (code != IOR && code != AND && code != XOR
6582 && code != PLUS && code != MINUS)
6583 return x;
6584
6585 lhs = XEXP (x, 0), rhs = XEXP (x, 1);
6586
6587 /* If either operand is a primitive we can't do anything, so get out fast. */
6588 if (GET_RTX_CLASS (GET_CODE (lhs)) == 'o'
6589 || GET_RTX_CLASS (GET_CODE (rhs)) == 'o')
6590 return x;
6591
6592 lhs = expand_compound_operation (lhs);
6593 rhs = expand_compound_operation (rhs);
6594 inner_code = GET_CODE (lhs);
6595 if (inner_code != GET_CODE (rhs))

--- 168 unchanged lines hidden (view full) ---

6764 if we already had one (just check for the simplest cases). */
6765 if (x && GET_CODE (XEXP (x, 0)) == SUBREG
6766 && GET_MODE (XEXP (x, 0)) == mode
6767 && SUBREG_REG (XEXP (x, 0)) == varop)
6768 varop = XEXP (x, 0);
6769 else
6770 varop = gen_lowpart_for_combine (mode, varop);
6771
6772 /* If we can't make the SUBREG, try to return what we were given. */
6773 if (GET_CODE (varop) == CLOBBER)
6774 return x ? x : varop;
6775
6776 /* If we are only masking insignificant bits, return VAROP. */
6777 if (constop == nonzero)
6778 x = varop;
6779
6780 /* Otherwise, return an AND. See how much, if any, of X we can use. */

--- 7 unchanged lines hidden (view full) ---

6788 SUBST (XEXP (x, 1), GEN_INT (constop));
6789
6790 SUBST (XEXP (x, 0), varop);
6791 }
6792
6793 return x;
6794}
6795
6796/* Given an expression, X, compute which bits in X can be non-zero.
6797 We don't care about bits outside of those defined in MODE.
6798
6799 For most X this is simply GET_MODE_MASK (MODE), but if X is
6800 a shift, AND, or zero_extract, we can do better. */
6801
6802static unsigned HOST_WIDE_INT
6803nonzero_bits (x, mode)

--- 55 unchanged lines hidden (view full) ---

6859#endif
6860
6861#ifdef STACK_BOUNDARY
6862 /* If this is the stack pointer, we may know something about its
6863 alignment. If PUSH_ROUNDING is defined, it is possible for the
6864 stack to be momentarily aligned only to that amount, so we pick
6865 the least alignment. */
6866
6867 if (x == stack_pointer_rtx)
6868 {
6869 int sp_alignment = STACK_BOUNDARY / BITS_PER_UNIT;
6870
6871#ifdef PUSH_ROUNDING
6872 sp_alignment = MIN (PUSH_ROUNDING (1), sp_alignment);
6873#endif
6874
6875 /* We must return here, otherwise we may get a worse result from
6876 one of the choices below. There is nothing useful below as
6877 far as the stack pointer is concerned. */
6878 return nonzero &= ~ (sp_alignment - 1);
6879 }
6880#endif
6881
6882 /* If X is a register whose nonzero bits value is current, use it.
6883 Otherwise, if X is a register whose value we can find, use that
6884 value. Otherwise, use the previously-computed global nonzero bits
6885 for this register. */
6886
6887 if (reg_last_set_value[REGNO (x)] != 0
6888 && reg_last_set_mode[REGNO (x)] == mode
6889 && (reg_n_sets[REGNO (x)] == 1
6890 || reg_last_set_label[REGNO (x)] == label_tick)
6891 && INSN_CUID (reg_last_set[REGNO (x)]) < subst_low_cuid)
6892 return reg_last_set_nonzero_bits[REGNO (x)];
6893
6894 tem = get_last_value (x);
6895
6896 if (tem)
6897 {

--- 55 unchanged lines hidden (view full) ---

6953 now done above. */
6954
6955 if (GET_MODE_CLASS (mode) == MODE_INT
6956 && mode_width <= HOST_BITS_PER_WIDE_INT)
6957 nonzero = STORE_FLAG_VALUE;
6958 break;
6959
6960 case NEG:
6961 if (num_sign_bit_copies (XEXP (x, 0), GET_MODE (x))
6962 == GET_MODE_BITSIZE (GET_MODE (x)))
6963 nonzero = 1;
6964
6965 if (GET_MODE_SIZE (GET_MODE (x)) < mode_width)
6966 nonzero |= (GET_MODE_MASK (mode) & ~ GET_MODE_MASK (GET_MODE (x)));
6967 break;
6968
6969 case ABS:
6970 if (num_sign_bit_copies (XEXP (x, 0), GET_MODE (x))
6971 == GET_MODE_BITSIZE (GET_MODE (x)))
6972 nonzero = 1;
6973 break;
6974
6975 case TRUNCATE:
6976 nonzero &= (nonzero_bits (XEXP (x, 0), mode) & GET_MODE_MASK (mode));
6977 break;
6978
6979 case ZERO_EXTEND:
6980 nonzero &= nonzero_bits (XEXP (x, 0), mode);

--- 4 unchanged lines hidden (view full) ---

6985 case SIGN_EXTEND:
6986 /* If the sign bit is known clear, this is the same as ZERO_EXTEND.
6987 Otherwise, show all the bits in the outer mode but not the inner
6988 may be non-zero. */
6989 inner_nz = nonzero_bits (XEXP (x, 0), mode);
6990 if (GET_MODE (XEXP (x, 0)) != VOIDmode)
6991 {
6992 inner_nz &= GET_MODE_MASK (GET_MODE (XEXP (x, 0)));
6993 if (inner_nz &
6994 (((HOST_WIDE_INT) 1
6995 << (GET_MODE_BITSIZE (GET_MODE (XEXP (x, 0))) - 1))))
6996 inner_nz |= (GET_MODE_MASK (mode)
6997 & ~ GET_MODE_MASK (GET_MODE (XEXP (x, 0))));
6998 }
6999
7000 nonzero &= inner_nz;
7001 break;
7002
7003 case AND:

--- 27 unchanged lines hidden (view full) ---

7031 HOST_WIDE_INT op1_maybe_minusp
7032 = (nz1 & ((HOST_WIDE_INT) 1 << (mode_width - 1)));
7033 int result_width = mode_width;
7034 int result_low = 0;
7035
7036 switch (code)
7037 {
7038 case PLUS:
7039 result_width = MAX (width0, width1) + 1;
7040 result_low = MIN (low0, low1);
7041 break;
7042 case MINUS:
7043 result_low = MIN (low0, low1);
7044 break;
7045 case MULT:
7046 result_width = width0 + width1;

--- 10 unchanged lines hidden (view full) ---

7057 if (! op0_maybe_minusp && ! op1_maybe_minusp)
7058 result_width = MIN (width0, width1);
7059 result_low = MIN (low0, low1);
7060 break;
7061 case UMOD:
7062 result_width = MIN (width0, width1);
7063 result_low = MIN (low0, low1);
7064 break;
7065 }
7066
7067 if (result_width < mode_width)
7068 nonzero &= ((HOST_WIDE_INT) 1 << result_width) - 1;
7069
7070 if (result_low > 0)
7071 nonzero &= ~ (((HOST_WIDE_INT) 1 << result_low) - 1);
7072 }

--- 18 unchanged lines hidden (view full) ---

7091 machines, we can compute this from which bits of the inner
7092 object might be nonzero. */
7093 if (GET_MODE_BITSIZE (GET_MODE (SUBREG_REG (x))) <= BITS_PER_WORD
7094 && (GET_MODE_BITSIZE (GET_MODE (SUBREG_REG (x)))
7095 <= HOST_BITS_PER_WIDE_INT))
7096 {
7097 nonzero &= nonzero_bits (SUBREG_REG (x), mode);
7098
7099#ifndef WORD_REGISTER_OPERATIONS
7100 /* On many CISC machines, accessing an object in a wider mode
7101 causes the high-order bits to become undefined. So they are
7102 not known to be zero. */
7103 if (GET_MODE_SIZE (GET_MODE (x))
7104 > GET_MODE_SIZE (GET_MODE (SUBREG_REG (x))))
7105 nonzero |= (GET_MODE_MASK (GET_MODE (x))
7106 & ~ GET_MODE_MASK (GET_MODE (SUBREG_REG (x))));
7107#endif
7108 }
7109 break;
7110
7111 case ASHIFTRT:
7112 case LSHIFTRT:
7113 case ASHIFT:
7114 case ROTATE:
7115 /* The nonzero bits are in two classes: any bits within MODE

--- 43 unchanged lines hidden (view full) ---

7159 /* This is at most the number of bits in the mode. */
7160 nonzero = ((HOST_WIDE_INT) 1 << (floor_log2 (mode_width) + 1)) - 1;
7161 break;
7162
7163 case IF_THEN_ELSE:
7164 nonzero &= (nonzero_bits (XEXP (x, 1), mode)
7165 | nonzero_bits (XEXP (x, 2), mode));
7166 break;
7167 }
7168
7169 return nonzero;
7170}
7171
7172/* Return the number of bits at the high-order end of X that are known to
7173 be equal to the sign bit. X will be used in mode MODE; if MODE is
7174 VOIDmode, X will be used in its own mode. The returned value will always
7175 be between 1 and the number of bits in MODE. */
7176
7177static int
7178num_sign_bit_copies (x, mode)

--- 13 unchanged lines hidden (view full) ---

7192 if (mode == VOIDmode)
7193 mode = GET_MODE (x);
7194
7195 if (mode == VOIDmode || FLOAT_MODE_P (mode) || FLOAT_MODE_P (GET_MODE (x)))
7196 return 1;
7197
7198 bitwidth = GET_MODE_BITSIZE (mode);
7199
7200 /* For a smaller object, just ignore the high bits. */
7201 if (bitwidth < GET_MODE_BITSIZE (GET_MODE (x)))
7202 return MAX (1, (num_sign_bit_copies (x, GET_MODE (x))
7203 - (GET_MODE_BITSIZE (GET_MODE (x)) - bitwidth)));
7204
7205#ifndef WORD_REGISTER_OPERATIONS
7206 /* If this machine does not do all register operations on the entire
7207 register and MODE is wider than the mode of X, we can say nothing
7208 at all about the high-order bits. */
7209 if (GET_MODE (x) != VOIDmode && bitwidth > GET_MODE_BITSIZE (GET_MODE (x)))
7210 return 1;
7211#endif
7212
7213 switch (code)
7214 {
7215 case REG:
7216
7217#ifdef POINTERS_EXTEND_UNSIGNED
7218 /* If pointers extend signed and this is a pointer in Pmode, say that
7219 all the bits above ptr_mode are known to be sign bit copies. */
7220 if (! POINTERS_EXTEND_UNSIGNED && GET_MODE (x) == Pmode && mode == Pmode
7221 && REGNO_POINTER_FLAG (REGNO (x)))
7222 return GET_MODE_BITSIZE (Pmode) - GET_MODE_BITSIZE (ptr_mode) + 1;
7223#endif
7224
7225 if (reg_last_set_value[REGNO (x)] != 0
7226 && reg_last_set_mode[REGNO (x)] == mode
7227 && (reg_n_sets[REGNO (x)] == 1
7228 || reg_last_set_label[REGNO (x)] == label_tick)
7229 && INSN_CUID (reg_last_set[REGNO (x)]) < subst_low_cuid)
7230 return reg_last_set_sign_bit_copies[REGNO (x)];
7231
7232 tem = get_last_value (x);
7233 if (tem != 0)
7234 return num_sign_bit_copies (tem, mode);
7235

--- 23 unchanged lines hidden (view full) ---

7259 /* If this is a SUBREG for a promoted object that is sign-extended
7260 and we are looking at it in a wider mode, we know that at least the
7261 high-order bits are known to be sign bit copies. */
7262
7263 if (SUBREG_PROMOTED_VAR_P (x) && ! SUBREG_PROMOTED_UNSIGNED_P (x))
7264 return MAX (bitwidth - GET_MODE_BITSIZE (GET_MODE (x)) + 1,
7265 num_sign_bit_copies (SUBREG_REG (x), mode));
7266
7267 /* For a smaller object, just ignore the high bits. */
7268 if (bitwidth <= GET_MODE_BITSIZE (GET_MODE (SUBREG_REG (x))))
7269 {
7270 num0 = num_sign_bit_copies (SUBREG_REG (x), VOIDmode);
7271 return MAX (1, (num0
7272 - (GET_MODE_BITSIZE (GET_MODE (SUBREG_REG (x)))
7273 - bitwidth)));
7274 }
7275

--- 22 unchanged lines hidden (view full) ---

7298 return MAX (1, bitwidth - INTVAL (XEXP (x, 1)));
7299 break;
7300
7301 case SIGN_EXTEND:
7302 return (bitwidth - GET_MODE_BITSIZE (GET_MODE (XEXP (x, 0)))
7303 + num_sign_bit_copies (XEXP (x, 0), VOIDmode));
7304
7305 case TRUNCATE:
7306 /* For a smaller object, just ignore the high bits. */
7307 num0 = num_sign_bit_copies (XEXP (x, 0), VOIDmode);
7308 return MAX (1, (num0 - (GET_MODE_BITSIZE (GET_MODE (XEXP (x, 0)))
7309 - bitwidth)));
7310
7311 case NOT:
7312 return num_sign_bit_copies (XEXP (x, 0), mode);
7313
7314 case ROTATE: case ROTATERT:

--- 9 unchanged lines hidden (view full) ---

7324 }
7325 break;
7326
7327 case NEG:
7328 /* In general, this subtracts one sign bit copy. But if the value
7329 is known to be positive, the number of sign bit copies is the
7330 same as that of the input. Finally, if the input has just one bit
7331 that might be nonzero, all the bits are copies of the sign bit. */
7332 nonzero = nonzero_bits (XEXP (x, 0), mode);
7333 if (nonzero == 1)
7334 return bitwidth;
7335
7336 num0 = num_sign_bit_copies (XEXP (x, 0), mode);
7337 if (num0 > 1
7338 && bitwidth <= HOST_BITS_PER_WIDE_INT
7339 && (((HOST_WIDE_INT) 1 << (bitwidth - 1)) & nonzero))
7340 num0--;
7341
7342 return num0;
7343
7344 case IOR: case AND: case XOR:
7345 case SMIN: case SMAX: case UMIN: case UMAX:
7346 /* Logical operations will preserve the number of sign-bit copies.

--- 27 unchanged lines hidden (view full) ---

7374 to be positive, we must allow for an additional bit since negating
7375 a negative number can remove one sign bit copy. */
7376
7377 num0 = num_sign_bit_copies (XEXP (x, 0), mode);
7378 num1 = num_sign_bit_copies (XEXP (x, 1), mode);
7379
7380 result = bitwidth - (bitwidth - num0) - (bitwidth - num1);
7381 if (result > 0
7382 && bitwidth <= HOST_BITS_PER_WIDE_INT
7383 && ((nonzero_bits (XEXP (x, 0), mode)
7384 & ((HOST_WIDE_INT) 1 << (bitwidth - 1))) != 0)
/* NOTE(review): unlike the operand-0 test just above, the operand-1
   test below lacks the outer parentheses, so it parses as
   nonzero_bits (...) & (sign_mask != 0), i.e. it tests bit 0 instead
   of the sign bit.  Should read
   ((nonzero_bits (XEXP (x, 1), mode)
     & ((HOST_WIDE_INT) 1 << (bitwidth - 1))) != 0)
   -- confirm against mainline combine.c.  */
7385 && (nonzero_bits (XEXP (x, 1), mode)
7386 & ((HOST_WIDE_INT) 1 << (bitwidth - 1)) != 0))
7387 result--;
7388
7389 return MAX (1, result);
7390
7391 case UDIV:
7392 /* The result must be <= the first operand. */
7393 return num_sign_bit_copies (XEXP (x, 0), mode);
7394
7395 case UMOD:
7396 /* The result must be <= the second operand. */
7397 return num_sign_bit_copies (XEXP (x, 1), mode);
7398
7399 case DIV:
7400 /* Similar to unsigned division, except that we have to worry about
7401 the case where the divisor is negative, in which case we have
7402 to add 1. */
7403 result = num_sign_bit_copies (XEXP (x, 0), mode);
7404 if (result > 1
7405 && bitwidth <= HOST_BITS_PER_WIDE_INT
7406 && (nonzero_bits (XEXP (x, 1), mode)
7407 & ((HOST_WIDE_INT) 1 << (bitwidth - 1))) != 0)
7408 result --;
7409
7410 return result;
7411
7412 case MOD:
7413 result = num_sign_bit_copies (XEXP (x, 1), mode);
7414 if (result > 1
7415 && bitwidth <= HOST_BITS_PER_WIDE_INT
7416 && (nonzero_bits (XEXP (x, 1), mode)
7417 & ((HOST_WIDE_INT) 1 << (bitwidth - 1))) != 0)
7418 result --;
7419
7420 return result;
7421
7422 case ASHIFTRT:
7423 /* Shifts by a constant add to the number of bits equal to the
7424 sign bit. */
7425 num0 = num_sign_bit_copies (XEXP (x, 0), mode);
7426 if (GET_CODE (XEXP (x, 1)) == CONST_INT

--- 12 unchanged lines hidden (view full) ---

7439 num0 = num_sign_bit_copies (XEXP (x, 0), mode);
7440 return MAX (1, num0 - INTVAL (XEXP (x, 1)));
7441
7442 case IF_THEN_ELSE:
7443 num0 = num_sign_bit_copies (XEXP (x, 1), mode);
7444 num1 = num_sign_bit_copies (XEXP (x, 2), mode);
7445 return MIN (num0, num1);
7446
7447#if STORE_FLAG_VALUE == -1
7448 case EQ: case NE: case GE: case GT: case LE: case LT:
7449 case GEU: case GTU: case LEU: case LTU:
7450 return bitwidth;
7451#endif
7452 }
7453
7454 /* If we haven't been able to figure it out by one of the above rules,
7455 see if some of the high-order bits are known to be zero. If so,
7456 count those bits and return one less than that amount. If we can't
7457 safely compute the mask for this mode, always return BITWIDTH. */
7458
7459 if (bitwidth > HOST_BITS_PER_WIDE_INT)

--- 95 unchanged lines hidden (view full) ---

7555 const0 ^= const1;
7556 break;
7557 case PLUS:
7558 const0 += const1;
7559 break;
7560 case NEG:
7561 op0 = NIL;
7562 break;
7563 }
7564 }
7565
7566 /* Otherwise, if either is a PLUS or NEG, we can't do anything. */
7567 else if (op0 == PLUS || op1 == PLUS || op0 == NEG || op1 == NEG)
7568 return 0;
7569
7570 /* If the two constants aren't the same, we can't do anything. The

--- 5 unchanged lines hidden (view full) ---

7576 switch (op0)
7577 {
7578 case IOR:
7579 if (op1 == AND)
7580 /* (a & b) | b == b */
7581 op0 = SET;
7582 else /* op1 == XOR */
7583 /* (a ^ b) | b == a | b */
7584 ;
7585 break;
7586
7587 case XOR:
7588 if (op1 == AND)
7589 /* (a & b) ^ b == (~a) & b */
7590 op0 = AND, *pcomp_p = 1;
7591 else /* op1 == IOR */
7592 /* (a | b) ^ b == a & ~b */
7593 op0 = AND, *pconst0 = ~ const0;
7594 break;
7595
7596 case AND:
7597 if (op1 == IOR)
7598 /* (a | b) & b == b */
7599 op0 = SET;
7600 else /* op1 == XOR */
7601 /* (a ^ b) & b) == (~a) & b */
7602 *pcomp_p = 1;
7603 break;
7604 }
7605
7606 /* Check for NO-OP cases. */
7607 const0 &= GET_MODE_MASK (mode);
7608 if (const0 == 0
7609 && (op0 == IOR || op0 == XOR || op0 == PLUS))
7610 op0 = NIL;
7611 else if (const0 == 0 && op0 == AND)

--- 52 unchanged lines hidden (view full) ---

7664 /* If we were given an invalid count, don't do anything except exactly
7665 what was requested. */
7666
7667 if (count < 0 || count > GET_MODE_BITSIZE (mode))
7668 {
7669 if (x)
7670 return x;
7671
7672 return gen_rtx (code, mode, varop, GEN_INT (count));
7673 }
7674
7675 /* Unless one of the branches of the `if' in this loop does a `continue',
7676 we will `break' the loop after the `if'. */
7677
7678 while (count != 0)
7679 {
7680 /* If we have an operand of (clobber (const_int 0)), just return that

--- 8 unchanged lines hidden (view full) ---

7689
7690 /* Convert ROTATERT to ROTATE. */
7691 if (code == ROTATERT)
7692 code = ROTATE, count = GET_MODE_BITSIZE (result_mode) - count;
7693
7694 /* We need to determine what mode we will do the shift in. If the
7695 shift is a right shift or a ROTATE, we must always do it in the mode
7696 it was originally done in. Otherwise, we can do it in MODE, the
7697 widest mode encountered. */
7698 shift_mode
7699 = (code == ASHIFTRT || code == LSHIFTRT || code == ROTATE
7700 ? result_mode : mode);
7701
7702 /* Handle cases where the count is greater than the size of the mode
7703 minus 1. For ASHIFT, use the size minus one as the count (this can
7704 occur when simplifying (lshiftrt (ashiftrt ..))). For rotates,
7705 take the count modulo the size. For other shifts, the result is

--- 16 unchanged lines hidden (view full) ---

7722 varop = const0_rtx;
7723 count = 0;
7724 break;
7725 }
7726 }
7727
7728 /* Negative counts are invalid and should not have been made (a
7729 programmer-specified negative count should have been handled
7730 above). */
7731 else if (count < 0)
7732 abort ();
7733
7734 /* An arithmetic right shift of a quantity known to be -1 or 0
7735 is a no-op. */
7736 if (code == ASHIFTRT
7737 && (num_sign_bit_copies (varop, shift_mode)
7738 == GET_MODE_BITSIZE (shift_mode)))

--- 43 unchanged lines hidden (view full) ---

7782 SIGN_EXTEND or ZERO_EXTEND from the narrower memory location. */
7783 if ((code == ASHIFTRT || code == LSHIFTRT)
7784 && ! mode_dependent_address_p (XEXP (varop, 0))
7785 && ! MEM_VOLATILE_P (varop)
7786 && (tmode = mode_for_size (GET_MODE_BITSIZE (mode) - count,
7787 MODE_INT, 1)) != BLKmode)
7788 {
7789 if (BYTES_BIG_ENDIAN)
7790 new = gen_rtx (MEM, tmode, XEXP (varop, 0));
7791 else
7792 new = gen_rtx (MEM, tmode,
7793 plus_constant (XEXP (varop, 0),
7794 count / BITS_PER_UNIT));
7795 RTX_UNCHANGING_P (new) = RTX_UNCHANGING_P (varop);
7796 MEM_VOLATILE_P (new) = MEM_VOLATILE_P (varop);
7797 MEM_IN_STRUCT_P (new) = MEM_IN_STRUCT_P (varop);
7798 varop = gen_rtx_combine (code == ASHIFTRT ? SIGN_EXTEND
7799 : ZERO_EXTEND, mode, new);
7800 count = 0;
7801 continue;
7802 }

--- 72 unchanged lines hidden (view full) ---

7875 /* If we are extracting just the sign bit of an arithmetic right
7876 shift, that shift is not needed. */
7877 if (code == LSHIFTRT && count == GET_MODE_BITSIZE (result_mode) - 1)
7878 {
7879 varop = XEXP (varop, 0);
7880 continue;
7881 }
7882
7883 /* ... fall through ... */
7884
7885 case LSHIFTRT:
7886 case ASHIFT:
7887 case ROTATE:
7888 /* Here we have two nested shifts. The result is usually the
7889 AND of a new shift with a mask. We compute the result below. */
7890 if (GET_CODE (XEXP (varop, 1)) == CONST_INT
7891 && INTVAL (XEXP (varop, 1)) >= 0

--- 319 unchanged lines hidden (view full) ---

8211 case MINUS:
8212 /* If we have (xshiftrt (minus (ashiftrt X C)) X) C)
8213 with C the size of VAROP - 1 and the shift is logical if
8214 STORE_FLAG_VALUE is 1 and arithmetic if STORE_FLAG_VALUE is -1,
8215 we have a (gt X 0) operation. If the shift is arithmetic with
8216 STORE_FLAG_VALUE of 1 or logical with STORE_FLAG_VALUE == -1,
8217 we have a (neg (gt X 0)) operation. */
8218
8219 if (GET_CODE (XEXP (varop, 0)) == ASHIFTRT
8220 && count == GET_MODE_BITSIZE (GET_MODE (varop)) - 1
8221 && (STORE_FLAG_VALUE == 1 || STORE_FLAG_VALUE == -1)
8222 && (code == LSHIFTRT || code == ASHIFTRT)
8223 && GET_CODE (XEXP (XEXP (varop, 0), 1)) == CONST_INT
8224 && INTVAL (XEXP (XEXP (varop, 0), 1)) == count
8225 && rtx_equal_p (XEXP (XEXP (varop, 0), 0), XEXP (varop, 1)))
8226 {
8227 count = 0;
8228 varop = gen_rtx_combine (GT, GET_MODE (varop), XEXP (varop, 1),
8229 const0_rtx);
8230
8231 if (STORE_FLAG_VALUE == 1 ? code == ASHIFTRT : code == LSHIFTRT)
8232 varop = gen_rtx_combine (NEG, GET_MODE (varop), varop);
8233
8234 continue;
8235 }
8236 break;
8237 }
8238
8239 break;
8240 }
8241
8242 /* We need to determine what mode to do the shift in. If the shift is
8243 a right shift or ROTATE, we must always do it in the mode it was
8244 originally done in. Otherwise, we can do it in MODE, the widest mode

--- 21 unchanged lines hidden (view full) ---

8266
8267 if (x && GET_CODE (XEXP (x, 0)) == SUBREG
8268 && GET_MODE (XEXP (x, 0)) == shift_mode
8269 && SUBREG_REG (XEXP (x, 0)) == varop)
8270 varop = XEXP (x, 0);
8271 else if (GET_MODE (varop) != shift_mode)
8272 varop = gen_lowpart_for_combine (shift_mode, varop);
8273
8274 /* If we can't make the SUBREG, try to return what we were given. */
8275 if (GET_CODE (varop) == CLOBBER)
8276 return x ? x : varop;
8277
8278 new = simplify_binary_operation (code, shift_mode, varop, const_rtx);
8279 if (new != 0)
8280 x = new;
8281 else
8282 {

--- 133 unchanged lines hidden (view full) ---

8416
8417 insn_code_number = recog (pat, insn, &num_clobbers_to_add);
8418 }
8419
8420 /* If we had any clobbers to add, make a new pattern than contains
8421 them. Then check to make sure that all of them are dead. */
8422 if (num_clobbers_to_add)
8423 {
8424 rtx newpat = gen_rtx (PARALLEL, VOIDmode,
8425 gen_rtvec (GET_CODE (pat) == PARALLEL
8426 ? XVECLEN (pat, 0) + num_clobbers_to_add
8427 : num_clobbers_to_add + 1));
8428
8429 if (GET_CODE (pat) == PARALLEL)
8430 for (i = 0; i < XVECLEN (pat, 0); i++)
8431 XVECEXP (newpat, 0, i) = XVECEXP (pat, 0, i);
8432 else
8433 XVECEXP (newpat, 0, 0) = pat;
8434
8435 add_clobbers (newpat, insn_code_number);
8436
8437 for (i = XVECLEN (newpat, 0) - num_clobbers_to_add;
8438 i < XVECLEN (newpat, 0); i++)
8439 {
8440 if (GET_CODE (XEXP (XVECEXP (newpat, 0, i), 0)) == REG
8441 && ! reg_dead_at_p (XEXP (XVECEXP (newpat, 0, i), 0), insn))
8442 return -1;
8443 else if (GET_CODE (XEXP (XVECEXP (newpat, 0, i), 0)) == SCRATCH)
8444 (*padded_scratches)++;
8445 notes = gen_rtx (EXPR_LIST, REG_UNUSED,
8446 XEXP (XVECEXP (newpat, 0, i), 0), notes);
8447 }
8448 pat = newpat;
8449 }
8450
8451 *pnewpat = pat;
8452 *pnotes = notes;
8453
8454 return insn_code_number;

--- 23 unchanged lines hidden (view full) ---

8478 /* We can only support MODE being wider than a word if X is a
8479 constant integer or has a mode the same size. */
8480
8481 if (GET_MODE_SIZE (mode) > UNITS_PER_WORD
8482 && ! ((GET_MODE (x) == VOIDmode
8483 && (GET_CODE (x) == CONST_INT
8484 || GET_CODE (x) == CONST_DOUBLE))
8485 || GET_MODE_SIZE (GET_MODE (x)) == GET_MODE_SIZE (mode)))
8486 return gen_rtx (CLOBBER, GET_MODE (x), const0_rtx);
8487
8488 /* X might be a paradoxical (subreg (mem)). In that case, gen_lowpart
8489 won't know what to do. So we will strip off the SUBREG here and
8490 process normally. */
8491 if (GET_CODE (x) == SUBREG && GET_CODE (SUBREG_REG (x)) == MEM)
8492 {
8493 x = SUBREG_REG (x);
8494 if (GET_MODE (x) == mode)
8495 return x;
8496 }
8497
8498 result = gen_lowpart_common (mode, x);
8499 if (result != 0
8500 && GET_CODE (result) == SUBREG
8501 && GET_CODE (SUBREG_REG (result)) == REG
8502 && REGNO (SUBREG_REG (result)) >= FIRST_PSEUDO_REGISTER
8503 && (GET_MODE_SIZE (GET_MODE (result))
8504 != GET_MODE_SIZE (GET_MODE (SUBREG_REG (result)))))
8505 reg_changes_size[REGNO (SUBREG_REG (result))] = 1;
8506
8507 if (result)
8508 return result;
8509
8510 if (GET_CODE (x) == MEM)
8511 {
8512 register int offset = 0;
8513 rtx new;
8514
8515 /* Refuse to work on a volatile memory ref or one with a mode-dependent
8516 address. */
8517 if (MEM_VOLATILE_P (x) || mode_dependent_address_p (XEXP (x, 0)))
8518 return gen_rtx (CLOBBER, GET_MODE (x), const0_rtx);
8519
8520 /* If we want to refer to something bigger than the original memref,
8521 generate a perverse subreg instead. That will force a reload
8522 of the original memref X. */
8523 if (GET_MODE_SIZE (GET_MODE (x)) < GET_MODE_SIZE (mode))
8524 return gen_rtx (SUBREG, mode, x, 0);
8525
8526 if (WORDS_BIG_ENDIAN)
8527 offset = (MAX (GET_MODE_SIZE (GET_MODE (x)), UNITS_PER_WORD)
8528 - MAX (GET_MODE_SIZE (mode), UNITS_PER_WORD));
8529 if (BYTES_BIG_ENDIAN)
8530 {
8531 /* Adjust the address so that the address-after-the-data is
8532 unchanged. */
8533 offset -= (MIN (UNITS_PER_WORD, GET_MODE_SIZE (mode))
8534 - MIN (UNITS_PER_WORD, GET_MODE_SIZE (GET_MODE (x))));
8535 }
8536 new = gen_rtx (MEM, mode, plus_constant (XEXP (x, 0), offset));
8537 RTX_UNCHANGING_P (new) = RTX_UNCHANGING_P (x);
8538 MEM_VOLATILE_P (new) = MEM_VOLATILE_P (x);
8539 MEM_IN_STRUCT_P (new) = MEM_IN_STRUCT_P (x);
8540 return new;
8541 }
8542
8543 /* If X is a comparison operator, rewrite it in a new mode. This
8544 probably won't match, but may allow further simplifications. */

--- 6 unchanged lines hidden (view full) ---

8551 else
8552 {
8553 int word = 0;
8554
8555 if (WORDS_BIG_ENDIAN && GET_MODE_SIZE (GET_MODE (x)) > UNITS_PER_WORD)
8556 word = ((GET_MODE_SIZE (GET_MODE (x))
8557 - MAX (GET_MODE_SIZE (mode), UNITS_PER_WORD))
8558 / UNITS_PER_WORD);
8559 return gen_rtx (SUBREG, mode, x, word);
8560 }
8561}
8562
8563/* Make an rtx expression. This is a subset of gen_rtx and only supports
8564 expressions of 1, 2, or 3 operands, each of which are rtx expressions.
8565
8566 If the identical expression was previously in the insn (in the undobuf),
8567 it will be returned. Only if it is not found will a new expression

--- 5 unchanged lines hidden (view full) ---

8573{
8574#ifndef __STDC__
8575 enum rtx_code code;
8576 enum machine_mode mode;
8577#endif
8578 va_list p;
8579 int n_args;
8580 rtx args[3];
8581 int i, j;
8582 char *fmt;
8583 rtx rt;
8584
8585 VA_START (p, mode);
8586
8587#ifndef __STDC__
8588 code = va_arg (p, enum rtx_code);
8589 mode = va_arg (p, enum machine_mode);
8590#endif
8591

--- 10 unchanged lines hidden (view full) ---

8602 abort ();
8603
8604 args[j] = va_arg (p, rtx);
8605 }
8606
8607 /* See if this is in undobuf. Be sure we don't use objects that came
8608 from another insn; this could produce circular rtl structures. */
8609
8610 for (i = previous_num_undos; i < undobuf.num_undo; i++)
8611 if (!undobuf.undo[i].is_int
8612 && GET_CODE (undobuf.undo[i].old_contents.r) == code
8613 && GET_MODE (undobuf.undo[i].old_contents.r) == mode)
8614 {
8615 for (j = 0; j < n_args; j++)
8616 if (XEXP (undobuf.undo[i].old_contents.r, j) != args[j])
8617 break;
8618
8619 if (j == n_args)
8620 return undobuf.undo[i].old_contents.r;
8621 }
8622
8623 /* Otherwise make a new rtx. We know we have 1, 2, or 3 args.
8624 Use rtx_alloc instead of gen_rtx because it's faster on RISC. */
8625 rt = rtx_alloc (code);
8626 PUT_MODE (rt, mode);
8627 XEXP (rt, 0) = args[0];
8628 if (n_args > 1)

--- 22 unchanged lines hidden (view full) ---

8651 || (CONSTANT_P (op0) && GET_CODE (op1) != CONST_INT)))
8652 tem = op0, op0 = op1, op1 = tem;
8653
8654 if (GET_RTX_CLASS (code) == '<')
8655 {
8656 enum machine_mode op_mode = GET_MODE (op0);
8657
8658 /* Strip the COMPARE from (REL_OP (compare X Y) 0) to get
8659 just (REL_OP X Y). */
8660 if (GET_CODE (op0) == COMPARE && op1 == const0_rtx)
8661 {
8662 op1 = XEXP (op0, 1);
8663 op0 = XEXP (op0, 0);
8664 op_mode = GET_MODE (op0);
8665 }
8666
8667 if (op_mode == VOIDmode)

--- 11 unchanged lines hidden (view full) ---

8679 && ((CONSTANT_P (op0) && GET_CODE (op1) != CONST_INT)
8680 || (GET_RTX_CLASS (GET_CODE (op0)) == 'o'
8681 && GET_RTX_CLASS (GET_CODE (op1)) != 'o')
8682 || (GET_CODE (op0) == SUBREG
8683 && GET_RTX_CLASS (GET_CODE (SUBREG_REG (op0))) == 'o'
8684 && GET_RTX_CLASS (GET_CODE (op1)) != 'o')))
8685 return gen_rtx_combine (code, mode, op1, op0);
8686
8687 return gen_rtx_combine (code, mode, op0, op1);
8688}
8689
8690static rtx
8691gen_unary (code, mode, op0_mode, op0)
8692 enum rtx_code code;
8693 enum machine_mode mode, op0_mode;
8694 rtx op0;

--- 116 unchanged lines hidden (view full) ---

8811 HOST_WIDE_INT c1 = INTVAL (XEXP (op1, 1));
8812 int changed = 0;
8813
8814 if (GET_CODE (inner_op0) == SUBREG && GET_CODE (inner_op1) == SUBREG
8815 && (GET_MODE_SIZE (GET_MODE (inner_op0))
8816 > GET_MODE_SIZE (GET_MODE (SUBREG_REG (inner_op0))))
8817 && (GET_MODE (SUBREG_REG (inner_op0))
8818 == GET_MODE (SUBREG_REG (inner_op1)))
8819 && (GET_MODE_BITSIZE (GET_MODE (SUBREG_REG (op0)))
8820 <= HOST_BITS_PER_WIDE_INT)
8821 && (0 == (~c0) & nonzero_bits (SUBREG_REG (inner_op0),
8822 GET_MODE (SUBREG_REG (op0))))
8823 && (0 == (~c1) & nonzero_bits (SUBREG_REG (inner_op1),
8824 GET_MODE (SUBREG_REG (inner_op1)))))
8825 {
8826 op0 = SUBREG_REG (inner_op0);
8827 op1 = SUBREG_REG (inner_op1);
8828
8829 /* The resulting comparison is always unsigned since we masked
8830 off the original sign bit. */
8831 code = unsigned_condition (code);
8832
8833 changed = 1;
8834 }
8835
8836 else if (c0 == c1)
8837 for (tmode = GET_CLASS_NARROWEST_MODE
8838 (GET_MODE_CLASS (GET_MODE (op0)));

--- 19 unchanged lines hidden (view full) ---

8858 && (code == EQ || code == NE)))
8859 op0 = XEXP (op0, 0), op1 = XEXP (op1, 0), code = swap_condition (code);
8860
8861 else
8862 break;
8863 }
8864
8865 /* If the first operand is a constant, swap the operands and adjust the
8866 comparison code appropriately. */
8867 if (CONSTANT_P (op0))
8868 {
8869 tem = op0, op0 = op1, op1 = tem;
8870 code = swap_condition (code);
8871 }
8872
8873 /* We now enter a loop during which we will try to simplify the comparison.
8874 For the most part, we only are concerned with comparisons with zero,
8875 but some things may really be comparisons with zero but not start

--- 85 unchanged lines hidden (view full) ---

8961 else if (const_op == 0
8962 && mode_width <= HOST_BITS_PER_WIDE_INT
8963 && (nonzero_bits (op0, mode)
8964 & ((HOST_WIDE_INT) 1 << (mode_width - 1))) == 0)
8965 code = EQ;
8966 break;
8967
8968 case GE:
8969 /* >= C is equivalent to > (C - 1). */
8970 if (const_op > 0)
8971 {
8972 const_op -= 1;
8973 op1 = GEN_INT (const_op);
8974 code = GT;
8975 /* ... fall through to GT below. */
8976 }
8977 else

--- 19 unchanged lines hidden (view full) ---

8997
8998 case LTU:
8999 /* < C is equivalent to <= (C - 1). */
9000 if (const_op > 0)
9001 {
9002 const_op -= 1;
9003 op1 = GEN_INT (const_op);
9004 code = LEU;
9005 /* ... fall through ... */
9006 }
9007
9008 /* (unsigned) < 0x80000000 is equivalent to >= 0. */
9009 else if (const_op == (HOST_WIDE_INT) 1 << (mode_width - 1))
9010 {
9011 const_op = 0, op1 = const0_rtx;
9012 code = GE;
9013 break;
9014 }
9015 else
9016 break;
9017
9018 case LEU:
9019 /* unsigned <= 0 is equivalent to == 0 */
9020 if (const_op == 0)
9021 code = EQ;
9022
9023 /* (unsigned) <= 0x7fffffff is equivalent to >= 0. */
9024 else if (const_op == ((HOST_WIDE_INT) 1 << (mode_width - 1)) - 1)
9025 {
9026 const_op = 0, op1 = const0_rtx;
9027 code = GE;
9028 }
9029 break;
9030
9031 case GEU:
9032 /* >= C is equivalent to < (C - 1). */
9033 if (const_op > 1)
9034 {
9035 const_op -= 1;
9036 op1 = GEN_INT (const_op);
9037 code = GTU;
9038 /* ... fall through ... */
9039 }
9040
9041 /* (unsigned) >= 0x80000000 is equivalent to < 0. */
9042 else if (const_op == (HOST_WIDE_INT) 1 << (mode_width - 1))
9043 {
9044 const_op = 0, op1 = const0_rtx;
9045 code = LT;
9046 break;
9047 }
9048 else
9049 break;
9050
9051 case GTU:
9052 /* unsigned > 0 is equivalent to != 0 */
9053 if (const_op == 0)
9054 code = NE;
9055
9056 /* (unsigned) > 0x7fffffff is equivalent to < 0. */
9057 else if (const_op == ((HOST_WIDE_INT) 1 << (mode_width - 1)) - 1)
9058 {
9059 const_op = 0, op1 = const0_rtx;
9060 code = LT;
9061 }
9062 break;
9063 }
9064
9065 /* Compute some predicates to simplify code below. */
9066
9067 equality_comparison_p = (code == EQ || code == NE);
9068 sign_bit_comparison_p = ((code == LT || code == GE) && const_op == 0);
9069 unsigned_comparison_p = (code == LTU || code == LEU || code == GTU
9070 || code == LEU);

--- 12 unchanged lines hidden (view full) ---

9083 switch. */
9084
9085 switch (GET_CODE (op0))
9086 {
9087 case ZERO_EXTRACT:
9088 /* If we are extracting a single bit from a variable position in
9089 a constant that has only a single bit set and are comparing it
9090 with zero, we can convert this into an equality comparison
9091 between the position and the location of the single bit. We can't
 9092 do this if bits are big-endian and we don't have an extzv since we then
9093 can't know what mode to use for the endianness adjustment. */
9094
9095 if (GET_CODE (XEXP (op0, 0)) == CONST_INT
9096 && XEXP (op0, 1) == const1_rtx
9097 && equality_comparison_p && const_op == 0
9098 && (i = exact_log2 (INTVAL (XEXP (op0, 0)))) >= 0
9099 && (! BITS_BIG_ENDIAN
9100#ifdef HAVE_extzv
9101 || HAVE_extzv
9102#endif
9103 ))
9104 {
9105#ifdef HAVE_extzv
9106 if (BITS_BIG_ENDIAN)
9107 i = (GET_MODE_BITSIZE
9108 (insn_operand_mode[(int) CODE_FOR_extzv][1]) - 1 - i);
9109#endif
9110
9111 op0 = XEXP (op0, 2);
9112 op1 = GEN_INT (i);
9113 const_op = i;
9114
9115 /* Result is nonzero iff shift count is equal to I. */
9116 code = reverse_condition (code);
9117 continue;
9118 }
9119
9120 /* ... fall through ... */
9121
9122 case SIGN_EXTRACT:
9123 tem = expand_compound_operation (op0);
9124 if (tem != op0)
9125 {
9126 op0 = tem;
9127 continue;
9128 }

--- 43 unchanged lines hidden (view full) ---

9172 & ((HOST_WIDE_INT) 1 << (mode_width - 1))) == 0)))
9173 {
9174 op0 = XEXP (op0, 0);
9175 code = (code == LT ? NE : EQ);
9176 continue;
9177 }
9178
9179 /* If we have NEG of something whose two high-order bits are the
9180 same, we know that "(-a) < 0" is equivalent to "a > 0". */
9181 if (num_sign_bit_copies (op0, mode) >= 2)
9182 {
9183 op0 = XEXP (op0, 0);
9184 code = swap_condition (code);
9185 continue;
9186 }
9187 break;
9188

--- 19 unchanged lines hidden (view full) ---

9208 op0 = simplify_and_const_int (NULL_RTX, mode, XEXP (op0, 0),
9209 ((HOST_WIDE_INT) 1
9210 << (mode_width - 1
9211 - INTVAL (XEXP (op0, 1)))));
9212 code = (code == LT ? NE : EQ);
9213 continue;
9214 }
9215
9216 /* ... fall through ... */
9217
9218 case ABS:
9219 /* ABS is ignorable inside an equality comparison with zero. */
9220 if (const_op == 0 && equality_comparison_p)
9221 {
9222 op0 = XEXP (op0, 0);
9223 continue;
9224 }

--- 50 unchanged lines hidden (view full) ---

9275 /* If the inner mode is narrower and we are extracting the low part,
9276 we can treat the SUBREG as if it were a ZERO_EXTEND. */
9277 if (subreg_lowpart_p (op0)
9278 && GET_MODE_BITSIZE (GET_MODE (SUBREG_REG (op0))) < mode_width)
9279 /* Fall through */ ;
9280 else
9281 break;
9282
9283 /* ... fall through ... */
9284
9285 case ZERO_EXTEND:
9286 if ((unsigned_comparison_p || equality_comparison_p)
9287 && (GET_MODE_BITSIZE (GET_MODE (XEXP (op0, 0)))
9288 <= HOST_BITS_PER_WIDE_INT)
9289 && ((unsigned HOST_WIDE_INT) const_op
9290 < GET_MODE_MASK (GET_MODE (XEXP (op0, 0)))))
9291 {

--- 187 unchanged lines hidden (view full) ---

9479 & GET_MODE_MASK (mode))
9480 + 1)) >= 0
9481 && const_op >> i == 0
9482 && (tmode = mode_for_size (i, MODE_INT, 1)) != BLKmode)
9483 {
9484 op0 = gen_lowpart_for_combine (tmode, XEXP (op0, 0));
9485 continue;
9486 }
9487 break;
9488
9489 case ASHIFT:
9490 /* If we have (compare (ashift FOO N) (const_int C)) and
9491 the high order N bits of FOO (N+1 if an inequality comparison)
9492 are known to be zero, we can do this by comparing FOO with C
9493 shifted right N bits so long as the low-order N bits of C are
9494 zero. */

--- 63 unchanged lines hidden (view full) ---

9558 && ((unsigned HOST_WIDE_INT) const_op <= GET_MODE_MASK (tmode)
9559 || ((unsigned HOST_WIDE_INT) - const_op
9560 <= GET_MODE_MASK (tmode))))
9561 {
9562 op0 = gen_lowpart_for_combine (tmode, XEXP (XEXP (op0, 0), 0));
9563 continue;
9564 }
9565
9566 /* ... fall through ... */
9567 case LSHIFTRT:
9568 /* If we have (compare (xshiftrt FOO N) (const_int C)) and
9569 the low order N bits of FOO are known to be zero, we can do this
9570 by comparing FOO with C shifted left N bits so long as no
9571 overflow occurs. */
9572 if (GET_CODE (XEXP (op0, 1)) == CONST_INT
9573 && INTVAL (XEXP (op0, 1)) >= 0
9574 && INTVAL (XEXP (op0, 1)) < HOST_BITS_PER_WIDE_INT

--- 17 unchanged lines hidden (view full) ---

9592 && GET_CODE (XEXP (op0, 1)) == CONST_INT
9593 && INTVAL (XEXP (op0, 1)) == mode_width - 1)
9594 {
9595 op0 = XEXP (op0, 0);
9596 code = (code == NE || code == GT ? LT : GE);
9597 continue;
9598 }
9599 break;
9600 }
9601
9602 break;
9603 }
9604
9605 /* Now make any compound operations involved in this comparison. Then,
 9606 check for an outermost SUBREG on OP0 that isn't doing anything or is
9607 paradoxical. The latter case can only occur when it is known that the
9608 "extra" bits will be zero. Therefore, it is safe to remove the SUBREG.
9609 We can never remove a SUBREG for a non-equality comparison because the
9610 sign bit is in a different place in the underlying object. */
9611
9612 op0 = make_compound_operation (op0, op1 == const0_rtx ? COMPARE : SET);
9613 op1 = make_compound_operation (op1, SET);
9614

--- 107 unchanged lines hidden (view full) ---

9722 if (REVERSIBLE_CC_MODE (GET_MODE (XEXP (x, 0))))
9723 return 1;
9724
9725 /* Otherwise try and find where the condition codes were last set and
9726 use that. */
9727 x = get_last_value (XEXP (x, 0));
9728 return (x && GET_CODE (x) == COMPARE
9729 && ! FLOAT_MODE_P (GET_MODE (XEXP (x, 0))));
9730 }
9731
9732 return 0;
9733}
9734
9735/* Utility function for following routine. Called when X is part of a value
9736 being stored into reg_last_set_value. Sets reg_last_set_table_tick
9737 for each register mentioned. Similar to mention_regs in cse.c */
9738
9739static void
9740update_table_tick (x)

--- 86 unchanged lines hidden (view full) ---

9827 reg_last_set_invalid[i] = 1;
9828 else
9829 reg_last_set_invalid[i] = 0;
9830 }
9831
9832 /* The value being assigned might refer to X (like in "x++;"). In that
9833 case, we must replace it with (clobber (const_int 0)) to prevent
9834 infinite loops. */
9835 if (value && ! get_last_value_validate (&value,
9836 reg_last_set_label[regno], 0))
9837 {
9838 value = copy_rtx (value);
9839 if (! get_last_value_validate (&value, reg_last_set_label[regno], 1))
9840 value = 0;
9841 }
9842
9843 /* For the main register being modified, update the value, the mode, the
9844 nonzero bits, and the number of sign bit copies. */
9845
9846 reg_last_set_value[regno] = value;
9847

--- 104 unchanged lines hidden (view full) ---

9952
9953 If REPLACE is non-zero, replace the invalid reference with
9954 (clobber (const_int 0)) and return 1. This replacement is useful because
9955 we often can get useful information about the form of a value (e.g., if
9956 it was produced by a shift that always produces -1 or 0) even though
9957 we don't know exactly what registers it was produced from. */
9958
9959static int
9960get_last_value_validate (loc, tick, replace)
9961 rtx *loc;
9962 int tick;
9963 int replace;
9964{
9965 rtx x = *loc;
9966 char *fmt = GET_RTX_FORMAT (GET_CODE (x));
9967 int len = GET_RTX_LENGTH (GET_CODE (x));
9968 int i;
9969
9970 if (GET_CODE (x) == REG)
9971 {
9972 int regno = REGNO (x);
9973 int endregno = regno + (regno < FIRST_PSEUDO_REGISTER
9974 ? HARD_REGNO_NREGS (regno, GET_MODE (x)) : 1);
9975 int j;
9976
9977 for (j = regno; j < endregno; j++)
9978 if (reg_last_set_invalid[j]
9979 /* If this is a pseudo-register that was only set once, it is
9980 always valid. */
9981 || (! (regno >= FIRST_PSEUDO_REGISTER && reg_n_sets[regno] == 1)
9982 && reg_last_set_label[j] > tick))
9983 {
9984 if (replace)
9985 *loc = gen_rtx (CLOBBER, GET_MODE (x), const0_rtx);
9986 return replace;
9987 }
9988
9989 return 1;
9990 }
9991
9992 for (i = 0; i < len; i++)
9993 if ((fmt[i] == 'e'
9994 && get_last_value_validate (&XEXP (x, i), tick, replace) == 0)
9995 /* Don't bother with these. They shouldn't occur anyway. */
9996 || fmt[i] == 'E')
9997 return 0;
9998
9999 /* If we haven't found a reason for it to be invalid, it is valid. */
10000 return 1;
10001}
10002

--- 5 unchanged lines hidden (view full) ---

10008get_last_value (x)
10009 rtx x;
10010{
10011 int regno;
10012 rtx value;
10013
10014 /* If this is a non-paradoxical SUBREG, get the value of its operand and
10015 then convert it to the desired mode. If this is a paradoxical SUBREG,
10016 we cannot predict what values the "extra" bits might have. */
10017 if (GET_CODE (x) == SUBREG
10018 && subreg_lowpart_p (x)
10019 && (GET_MODE_SIZE (GET_MODE (x))
10020 <= GET_MODE_SIZE (GET_MODE (SUBREG_REG (x))))
10021 && (value = get_last_value (SUBREG_REG (x))) != 0)
10022 return gen_lowpart_for_combine (GET_MODE (x), value);
10023
10024 if (GET_CODE (x) != REG)
10025 return 0;
10026
10027 regno = REGNO (x);
10028 value = reg_last_set_value[regno];
10029
10030 /* If we don't have a value or if it isn't for this basic block, return 0. */
10031
10032 if (value == 0
10033 || (reg_n_sets[regno] != 1
10034 && reg_last_set_label[regno] != label_tick))
10035 return 0;
10036
10037 /* If the value was set in a later insn than the ones we are processing,
10038 we can't use it even if the register was only set once, but make a quick
10039 check to see if the previous insn set it to something. This is commonly
10040 the case when the same pseudo is used by repeated insns.
10041

--- 30 unchanged lines hidden (view full) ---

10072 value = SET_SRC (set);
10073
10074 /* Make sure that VALUE doesn't reference X. Replace any
10075 explicit references with a CLOBBER. If there are any remaining
10076 references (rare), don't use the value. */
10077
10078 if (reg_mentioned_p (x, value))
10079 value = replace_rtx (copy_rtx (value), x,
10080 gen_rtx (CLOBBER, GET_MODE (x), const0_rtx));
10081
10082 if (reg_overlap_mentioned_p (x, value))
10083 return 0;
10084 }
10085 else
10086 return 0;
10087 }
10088
10089 /* If the value has all its registers valid, return it. */
10090 if (get_last_value_validate (&value, reg_last_set_label[regno], 0))
10091 return value;
10092
10093 /* Otherwise, make a copy and replace any invalid register with
10094 (clobber (const_int 0)). If that fails for some reason, return 0. */
10095
10096 value = copy_rtx (value);
10097 if (get_last_value_validate (&value, reg_last_set_label[regno], 1))
10098 return value;
10099
10100 return 0;
10101}
10102
10103/* Return nonzero if expression X refers to a REG or to memory
10104 that is set in an instruction more recent than FROM_CUID. */
10105

--- 129 unchanged lines hidden (view full) ---

10235 if (insn == basic_block_head[block])
10236 break;
10237
10238 if (block == n_basic_blocks)
10239 return 0;
10240 }
10241
10242 for (i = reg_dead_regno; i < reg_dead_endregno; i++)
10243 if (basic_block_live_at_start[block][i / REGSET_ELT_BITS]
10244 & ((REGSET_ELT_TYPE) 1 << (i % REGSET_ELT_BITS)))
10245 return 0;
10246
10247 return 1;
10248}
10249
10250/* Note hard registers in X that are used. This code is similar to
10251 that in flow.c, but much simpler since we don't care about pseudos. */
10252

--- 64 unchanged lines hidden (view full) ---

10317 || GET_CODE (testreg) == SIGN_EXTRACT
10318 || GET_CODE (testreg) == STRICT_LOW_PART)
10319 testreg = XEXP (testreg, 0);
10320
10321 if (GET_CODE (testreg) == MEM)
10322 mark_used_regs_combine (XEXP (testreg, 0));
10323
10324 mark_used_regs_combine (SET_SRC (x));
10325 return;
10326 }
10327 }
10328
10329 /* Recursively scan the operands of this expression. */
10330
10331 {
10332 register char *fmt = GET_RTX_FORMAT (code);
10333
10334 for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)

--- 20 unchanged lines hidden (view full) ---

10355remove_death (regno, insn)
10356 int regno;
10357 rtx insn;
10358{
10359 register rtx note = find_regno_note (insn, REG_DEAD, regno);
10360
10361 if (note)
10362 {
10363 reg_n_deaths[regno]--;
10364 remove_note (insn, note);
10365 }
10366
10367 return note;
10368}
10369
10370/* For each register (hardware or pseudo) used within expression X, if its
10371 death is in an instruction with cuid between FROM_CUID (inclusive) and
10372 TO_INSN (exclusive), put a REG_DEAD note for that register in the
10373 list headed by PNOTES.
10374
10375 This is done when X is being merged by combination into TO_INSN. These
10376 notes will then be distributed as needed. */
10377
10378static void
10379move_deaths (x, from_cuid, to_insn, pnotes)
10380 rtx x;
10381 int from_cuid;
10382 rtx to_insn;
10383 rtx *pnotes;
10384{
10385 register char *fmt;
10386 register int len, i;
10387 register enum rtx_code code = GET_CODE (x);
10388
10389 if (code == REG)
10390 {
10391 register int regno = REGNO (x);
10392 register rtx where_dead = reg_last_death[regno];
10393 register rtx before_dead, after_dead;
10394
10395 /* WHERE_DEAD could be a USE insn made by combine, so first we
10396 make sure that we have insns with valid INSN_CUID values. */
10397 before_dead = where_dead;
10398 while (before_dead && INSN_UID (before_dead) > max_uid_cuid)
10399 before_dead = PREV_INSN (before_dead);
10400 after_dead = where_dead;
10401 while (after_dead && INSN_UID (after_dead) > max_uid_cuid)
10402 after_dead = NEXT_INSN (after_dead);

--- 12 unchanged lines hidden (view full) ---

10415
10416 We must also check for the case where X is a hard register
10417 and NOTE is a death note for a range of hard registers
10418 including X. In that case, we must put REG_DEAD notes for
10419 the remaining registers in place of NOTE. */
10420
10421 if (note != 0 && regno < FIRST_PSEUDO_REGISTER
10422 && (GET_MODE_SIZE (GET_MODE (XEXP (note, 0)))
10423 != GET_MODE_SIZE (GET_MODE (x))))
10424 {
10425 int deadregno = REGNO (XEXP (note, 0));
10426 int deadend
10427 = (deadregno + HARD_REGNO_NREGS (deadregno,
10428 GET_MODE (XEXP (note, 0))));
10429 int ourend = regno + HARD_REGNO_NREGS (regno, GET_MODE (x));
10430 int i;
10431
10432 for (i = deadregno; i < deadend; i++)
10433 if (i < regno || i >= ourend)
10434 REG_NOTES (where_dead)
10435 = gen_rtx (EXPR_LIST, REG_DEAD,
10436 gen_rtx (REG, reg_raw_mode[i], i),
10437 REG_NOTES (where_dead));
10438 }
10439 /* If we didn't find any note, and we have a multi-reg hard
10440 register, then to be safe we must check for REG_DEAD notes
10441 for each register other than the first. They could have
10442 their own REG_DEAD notes lying around. */
10443 else if (note == 0 && regno < FIRST_PSEUDO_REGISTER
10444 && HARD_REGNO_NREGS (regno, GET_MODE (x)) > 1)
10445 {
10446 int ourend = regno + HARD_REGNO_NREGS (regno, GET_MODE (x));
10447 int i;
10448 rtx oldnotes = 0;
10449
10450 for (i = regno + 1; i < ourend; i++)
10451 move_deaths (gen_rtx (REG, reg_raw_mode[i], i),
10452 from_cuid, to_insn, &oldnotes);
10453 }
10454
10455 if (note != 0 && GET_MODE (XEXP (note, 0)) == GET_MODE (x))
10456 {
10457 XEXP (note, 1) = *pnotes;
10458 *pnotes = note;
10459 }
10460 else
10461 *pnotes = gen_rtx (EXPR_LIST, REG_DEAD, x, *pnotes);
10462
10463 reg_n_deaths[regno]++;
10464 }
10465
10466 return;
10467 }
10468
10469 else if (GET_CODE (x) == SET)
10470 {
10471 rtx dest = SET_DEST (x);
10472
10473 move_deaths (SET_SRC (x), from_cuid, to_insn, pnotes);
10474
10475 /* In the case of a ZERO_EXTRACT, a STRICT_LOW_PART, or a SUBREG
10476 that accesses one word of a multi-word item, some
10477 piece of every register in the expression is used by
10478 this insn, so remove any old death. */
10479
10480 if (GET_CODE (dest) == ZERO_EXTRACT
10481 || GET_CODE (dest) == STRICT_LOW_PART
10482 || (GET_CODE (dest) == SUBREG
10483 && (((GET_MODE_SIZE (GET_MODE (dest))
10484 + UNITS_PER_WORD - 1) / UNITS_PER_WORD)
10485 == ((GET_MODE_SIZE (GET_MODE (SUBREG_REG (dest)))
10486 + UNITS_PER_WORD - 1) / UNITS_PER_WORD))))
10487 {
10488 move_deaths (dest, from_cuid, to_insn, pnotes);
10489 return;
10490 }
10491
10492 /* If this is some other SUBREG, we know it replaces the entire
10493 value, so use that as the destination. */
10494 if (GET_CODE (dest) == SUBREG)
10495 dest = SUBREG_REG (dest);
10496
10497 /* If this is a MEM, adjust deaths of anything used in the address.
10498 For a REG (the only other possibility), the entire value is
10499 being replaced so the old value is not used in this insn. */
10500
10501 if (GET_CODE (dest) == MEM)
10502 move_deaths (XEXP (dest, 0), from_cuid, to_insn, pnotes);
10503 return;
10504 }
10505
10506 else if (GET_CODE (x) == CLOBBER)
10507 return;
10508
10509 len = GET_RTX_LENGTH (code);
10510 fmt = GET_RTX_FORMAT (code);
10511
10512 for (i = 0; i < len; i++)
10513 {
10514 if (fmt[i] == 'E')
10515 {
10516 register int j;
10517 for (j = XVECLEN (x, i) - 1; j >= 0; j--)
10518 move_deaths (XVECEXP (x, i, j), from_cuid, to_insn, pnotes);
10519 }
10520 else if (fmt[i] == 'e')
10521 move_deaths (XEXP (x, i), from_cuid, to_insn, pnotes);
10522 }
10523}
10524
10525/* Return 1 if X is the target of a bit-field assignment in BODY, the
10526 pattern of an insn. X must be a REG. */
10527
10528static int
10529reg_bitfield_target_p (x, body)

--- 68 unchanged lines hidden (view full) ---

10598 the latest copy of that register. */
10599 if (XEXP (note, 0) && GET_CODE (XEXP (note, 0)) == REG
10600 && REGNO (XEXP (note, 0)) >= FIRST_PSEUDO_REGISTER)
10601 XEXP (note, 0) = regno_reg_rtx[REGNO (XEXP (note, 0))];
10602
10603 next_note = XEXP (note, 1);
10604 switch (REG_NOTE_KIND (note))
10605 {
10606 case REG_UNUSED:
10607 /* Any clobbers for i3 may still exist, and so we must process
10608 REG_UNUSED notes from that insn.
10609
10610 Any clobbers from i2 or i1 can only exist if they were added by
10611 recog_for_combine. In that case, recog_for_combine created the
10612 necessary REG_UNUSED notes. Trying to keep any original
10613 REG_UNUSED notes from these insns can cause incorrect output

--- 28 unchanged lines hidden (view full) ---

10642 PUT_REG_NOTE_KIND (note, REG_DEAD);
10643 place = i3;
10644 }
10645 break;
10646
10647 case REG_EQUAL:
10648 case REG_EQUIV:
10649 case REG_NONNEG:
10650 /* These notes say something about results of an insn. We can
10651 only support them if they used to be on I3 in which case they
10652 remain on I3. Otherwise they are ignored.
10653
10654 If the note refers to an expression that is not a constant, we
10655 must also ignore the note since we cannot tell whether the
10656 equivalence is still true. It might be possible to do
10657 slightly better than this (we only have a problem if I2DEST

--- 86 unchanged lines hidden (view full) ---

10744 /* If the register is used in both I2 and I3 and it dies in I3,
10745 we might have added another reference to it. If reg_n_refs
10746 was 2, bump it to 3. This has to be correct since the
10747 register must have been set somewhere. The reason this is
10748 done is because local-alloc.c treats 2 references as a
10749 special case. */
10750
10751 if (place == i3 && i2 != 0 && GET_CODE (XEXP (note, 0)) == REG
10752 && reg_n_refs[REGNO (XEXP (note, 0))]== 2
10753 && reg_referenced_p (XEXP (note, 0), PATTERN (i2)))
10754 reg_n_refs[REGNO (XEXP (note, 0))] = 3;
10755
10756 if (place == 0)
10757 {
10758 for (tem = prev_nonnote_insn (i3);
10759 place == 0 && tem
10760 && (GET_CODE (tem) == INSN || GET_CODE (tem) == CALL_INSN);
10761 tem = prev_nonnote_insn (tem))
10762 {
10763 /* If the register is being set at TEM, see if that is all
10764 TEM is doing. If so, delete TEM. Otherwise, make this
10765 into a REG_UNUSED note instead. */
10766 if (reg_set_p (XEXP (note, 0), PATTERN (tem)))
10767 {
10768 rtx set = single_set (tem);
10769
10770 /* Verify that it was the set, and not a clobber that
10771 modified the register. */
10772
10773 if (set != 0 && ! side_effects_p (SET_SRC (set))
10774 && (rtx_equal_p (XEXP (note, 0), SET_DEST (set))
10775 || (GET_CODE (SET_DEST (set)) == SUBREG
10776 && rtx_equal_p (XEXP (note, 0),
10777 XEXP (SET_DEST (set), 0)))))
10778 {
10779 /* Move the notes and links of TEM elsewhere.
10780 This might delete other dead insns recursively.
10781 First set the pattern to something that won't use
10782 any register. */
10783
10784 PATTERN (tem) = pc_rtx;
10785
10786 distribute_notes (REG_NOTES (tem), tem, tem,
10787 NULL_RTX, NULL_RTX, NULL_RTX);
10788 distribute_links (LOG_LINKS (tem));
10789
10790 PUT_CODE (tem, NOTE);
10791 NOTE_LINE_NUMBER (tem) = NOTE_INSN_DELETED;
10792 NOTE_SOURCE_FILE (tem) = 0;
10793 }
10794 else
10795 {
10796 PUT_REG_NOTE_KIND (note, REG_UNUSED);
10797
10798 /* If there isn't already a REG_UNUSED note, put one
10799 here. */
10800 if (! find_regno_note (tem, REG_UNUSED,
10801 REGNO (XEXP (note, 0))))

--- 29 unchanged lines hidden (view full) ---

10831 /* If we haven't found an insn for the death note and it
10832 is still a REG_DEAD note, but we have hit a CODE_LABEL,
10833 insert a USE insn for the register at that label and
10834 put the death node there. This prevents problems with
10835 call-state tracking in caller-save.c. */
10836 if (REG_NOTE_KIND (note) == REG_DEAD && place == 0 && tem != 0)
10837 {
10838 place
10839 = emit_insn_after (gen_rtx (USE, VOIDmode, XEXP (note, 0)),
10840 tem);
10841
10842 /* If this insn was emitted between blocks, then update
10843 basic_block_head of the current block to include it. */
10844 if (basic_block_end[this_basic_block - 1] == tem)
10845 basic_block_head[this_basic_block] = place;
10846 }
10847 }
10848
10849 /* If the register is set or already dead at PLACE, we needn't do
10850 anything with this note if it is still a REG_DEAD note.
10851
10852 Note that we cannot use just `dead_or_set_p' here since we can
10853 convert an assignment to a register into a bit-field assignment.
10854 Therefore, we must also omit the note if the register is the
10855 target of a bitfield assignment. */
10856
10857 if (place && REG_NOTE_KIND (note) == REG_DEAD)
10858 {
10859 int regno = REGNO (XEXP (note, 0));
10860
10861 if (dead_or_set_p (place, XEXP (note, 0))
10862 || reg_bitfield_target_p (XEXP (note, 0), PATTERN (place)))
10863 {
10864 /* Unless the register previously died in PLACE, clear

--- 25 unchanged lines hidden (view full) ---

10890 GET_MODE (XEXP (note, 0)));
10891 int all_used = 1;
10892 int i;
10893
10894 for (i = regno; i < endregno; i++)
10895 if (! refers_to_regno_p (i, i + 1, PATTERN (place), 0)
10896 && ! find_regno_fusage (place, USE, i))
10897 {
10898 rtx piece = gen_rtx (REG, reg_raw_mode[i], i);
10899 rtx p;
10900
10901 /* See if we already placed a USE note for this
10902 register in front of PLACE. */
10903 for (p = place;
10904 GET_CODE (PREV_INSN (p)) == INSN
10905 && GET_CODE (PATTERN (PREV_INSN (p))) == USE;
10906 p = PREV_INSN (p))
10907 if (rtx_equal_p (piece,
10908 XEXP (PATTERN (PREV_INSN (p)), 0)))
10909 {
10910 p = 0;
10911 break;
10912 }
10913
10914 if (p)
10915 {
10916 rtx use_insn
10917 = emit_insn_before (gen_rtx (USE, VOIDmode,
10918 piece),
10919 p);
10920 REG_NOTES (use_insn)
10921 = gen_rtx (EXPR_LIST, REG_DEAD, piece,
10922 REG_NOTES (use_insn));
10923 }
10924
10925 all_used = 0;
10926 }
10927
10928 /* Check for the case where the register dying partially
10929 overlaps the register set by this insn. */
10930 if (all_used)

--- 6 unchanged lines hidden (view full) ---

10937
10938 if (! all_used)
10939 {
10940 /* Put only REG_DEAD notes for pieces that are
10941 still used and that are not already dead or set. */
10942
10943 for (i = regno; i < endregno; i++)
10944 {
10945 rtx piece = gen_rtx (REG, reg_raw_mode[i], i);
10946
10947 if ((reg_referenced_p (piece, PATTERN (place))
10948 || (GET_CODE (place) == CALL_INSN
10949 && find_reg_fusage (place, USE, piece)))
10950 && ! dead_or_set_p (place, piece)
10951 && ! reg_bitfield_target_p (piece,
10952 PATTERN (place)))
10953 REG_NOTES (place) = gen_rtx (EXPR_LIST, REG_DEAD,
10954 piece,
10955 REG_NOTES (place));
10956 }
10957
10958 place = 0;
10959 }
10960 }
10961 }
10962 break;
10963

--- 6 unchanged lines hidden (view full) ---

10970 if (place)
10971 {
10972 XEXP (note, 1) = REG_NOTES (place);
10973 REG_NOTES (place) = note;
10974 }
10975 else if ((REG_NOTE_KIND (note) == REG_DEAD
10976 || REG_NOTE_KIND (note) == REG_UNUSED)
10977 && GET_CODE (XEXP (note, 0)) == REG)
10978 reg_n_deaths[REGNO (XEXP (note, 0))]--;
10979
10980 if (place2)
10981 {
10982 if ((REG_NOTE_KIND (note) == REG_DEAD
10983 || REG_NOTE_KIND (note) == REG_UNUSED)
10984 && GET_CODE (XEXP (note, 0)) == REG)
10985 reg_n_deaths[REGNO (XEXP (note, 0))]++;
10986
10987 REG_NOTES (place2) = gen_rtx (GET_CODE (note), REG_NOTE_KIND (note),
10988 XEXP (note, 0), REG_NOTES (place2));
10989 }
10990 }
10991}
10992
10993/* Similarly to above, distribute the LOG_LINKS that used to be present on
10994 I3, I2, and I1 to new locations. This is also called in one case to
10995 add a link pointing at I3 when I3's destination is changed. */
10996

--- 80 unchanged lines hidden (view full) ---

11077 if (added_links_insn == 0
11078 || INSN_CUID (added_links_insn) > INSN_CUID (place))
11079 added_links_insn = place;
11080 }
11081 }
11082 }
11083}
11084
11085void
11086dump_combine_stats (file)
11087 FILE *file;
11088{
11089 fprintf
11090 (file,
11091 ";; Combiner statistics: %d attempts, %d substitutions (%d requiring new space),\n;; %d successes.\n\n",
11092 combine_attempts, combine_merges, combine_extras, combine_successes);
11093}
11094
11095void
11096dump_combine_total_stats (file)
11097 FILE *file;
11098{
11099 fprintf
11100 (file,
11101 "\n;; Combiner totals: %d attempts, %d substitutions (%d requiring new space),\n;; %d successes.\n",
11102 total_attempts, total_merges, total_extras, total_successes);
11103}