simplify-rtx.c revision 90075
10Sstevel@tonic-gate/* RTL simplification functions for GNU compiler. 20Sstevel@tonic-gate Copyright (C) 1987, 1988, 1989, 1992, 1993, 1994, 1995, 1996, 1997, 1998, 30Sstevel@tonic-gate 1999, 2000, 2001 Free Software Foundation, Inc. 40Sstevel@tonic-gate 50Sstevel@tonic-gateThis file is part of GCC. 60Sstevel@tonic-gate 70Sstevel@tonic-gateGCC is free software; you can redistribute it and/or modify it under 80Sstevel@tonic-gatethe terms of the GNU General Public License as published by the Free 90Sstevel@tonic-gateSoftware Foundation; either version 2, or (at your option) any later 100Sstevel@tonic-gateversion. 110Sstevel@tonic-gate 120Sstevel@tonic-gateGCC is distributed in the hope that it will be useful, but WITHOUT ANY 130Sstevel@tonic-gateWARRANTY; without even the implied warranty of MERCHANTABILITY or 140Sstevel@tonic-gateFITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License 150Sstevel@tonic-gatefor more details. 160Sstevel@tonic-gate 170Sstevel@tonic-gateYou should have received a copy of the GNU General Public License 180Sstevel@tonic-gatealong with GCC; see the file COPYING. If not, write to the Free 190Sstevel@tonic-gateSoftware Foundation, 59 Temple Place - Suite 330, Boston, MA 200Sstevel@tonic-gate02111-1307, USA. 
*/ 210Sstevel@tonic-gate 220Sstevel@tonic-gate 23633Sgt29601#include "config.h" 240Sstevel@tonic-gate#include "system.h" 250Sstevel@tonic-gate 260Sstevel@tonic-gate#include "rtl.h" 270Sstevel@tonic-gate#include "tm_p.h" 280Sstevel@tonic-gate#include "regs.h" 290Sstevel@tonic-gate#include "hard-reg-set.h" 300Sstevel@tonic-gate#include "flags.h" 310Sstevel@tonic-gate#include "real.h" 320Sstevel@tonic-gate#include "insn-config.h" 330Sstevel@tonic-gate#include "recog.h" 340Sstevel@tonic-gate#include "function.h" 350Sstevel@tonic-gate#include "expr.h" 360Sstevel@tonic-gate#include "toplev.h" 370Sstevel@tonic-gate#include "output.h" 380Sstevel@tonic-gate#include "ggc.h" 390Sstevel@tonic-gate 400Sstevel@tonic-gate/* Simplification and canonicalization of RTL. */ 410Sstevel@tonic-gate 420Sstevel@tonic-gate/* Nonzero if X has the form (PLUS frame-pointer integer). We check for 430Sstevel@tonic-gate virtual regs here because the simplify_*_operation routines are called 440Sstevel@tonic-gate by integrate.c, which is called before virtual register instantiation. 450Sstevel@tonic-gate 460Sstevel@tonic-gate ?!? FIXED_BASE_PLUS_P and NONZERO_BASE_PLUS_P need to move into 470Sstevel@tonic-gate a header file so that their definitions can be shared with the 480Sstevel@tonic-gate simplification routines in simplify-rtx.c. Until then, do not 490Sstevel@tonic-gate change these macros without also changing the copy in simplify-rtx.c. 
*/ 500Sstevel@tonic-gate 510Sstevel@tonic-gate#define FIXED_BASE_PLUS_P(X) \ 520Sstevel@tonic-gate ((X) == frame_pointer_rtx || (X) == hard_frame_pointer_rtx \ 530Sstevel@tonic-gate || ((X) == arg_pointer_rtx && fixed_regs[ARG_POINTER_REGNUM])\ 540Sstevel@tonic-gate || (X) == virtual_stack_vars_rtx \ 550Sstevel@tonic-gate || (X) == virtual_incoming_args_rtx \ 560Sstevel@tonic-gate || (GET_CODE (X) == PLUS && GET_CODE (XEXP (X, 1)) == CONST_INT \ 570Sstevel@tonic-gate && (XEXP (X, 0) == frame_pointer_rtx \ 580Sstevel@tonic-gate || XEXP (X, 0) == hard_frame_pointer_rtx \ 590Sstevel@tonic-gate || ((X) == arg_pointer_rtx \ 600Sstevel@tonic-gate && fixed_regs[ARG_POINTER_REGNUM]) \ 610Sstevel@tonic-gate || XEXP (X, 0) == virtual_stack_vars_rtx \ 620Sstevel@tonic-gate || XEXP (X, 0) == virtual_incoming_args_rtx)) \ 630Sstevel@tonic-gate || GET_CODE (X) == ADDRESSOF) 640Sstevel@tonic-gate 650Sstevel@tonic-gate/* Similar, but also allows reference to the stack pointer. 660Sstevel@tonic-gate 670Sstevel@tonic-gate This used to include FIXED_BASE_PLUS_P, however, we can't assume that 680Sstevel@tonic-gate arg_pointer_rtx by itself is nonzero, because on at least one machine, 690Sstevel@tonic-gate the i960, the arg pointer is zero when it is unused. 
*/ 700Sstevel@tonic-gate 710Sstevel@tonic-gate#define NONZERO_BASE_PLUS_P(X) \ 720Sstevel@tonic-gate ((X) == frame_pointer_rtx || (X) == hard_frame_pointer_rtx \ 730Sstevel@tonic-gate || (X) == virtual_stack_vars_rtx \ 740Sstevel@tonic-gate || (X) == virtual_incoming_args_rtx \ 750Sstevel@tonic-gate || (GET_CODE (X) == PLUS && GET_CODE (XEXP (X, 1)) == CONST_INT \ 760Sstevel@tonic-gate && (XEXP (X, 0) == frame_pointer_rtx \ 770Sstevel@tonic-gate || XEXP (X, 0) == hard_frame_pointer_rtx \ 780Sstevel@tonic-gate || ((X) == arg_pointer_rtx \ 790Sstevel@tonic-gate && fixed_regs[ARG_POINTER_REGNUM]) \ 800Sstevel@tonic-gate || XEXP (X, 0) == virtual_stack_vars_rtx \ 810Sstevel@tonic-gate || XEXP (X, 0) == virtual_incoming_args_rtx)) \ 820Sstevel@tonic-gate || (X) == stack_pointer_rtx \ 830Sstevel@tonic-gate || (X) == virtual_stack_dynamic_rtx \ 840Sstevel@tonic-gate || (X) == virtual_outgoing_args_rtx \ 850Sstevel@tonic-gate || (GET_CODE (X) == PLUS && GET_CODE (XEXP (X, 1)) == CONST_INT \ 860Sstevel@tonic-gate && (XEXP (X, 0) == stack_pointer_rtx \ 870Sstevel@tonic-gate || XEXP (X, 0) == virtual_stack_dynamic_rtx \ 880Sstevel@tonic-gate || XEXP (X, 0) == virtual_outgoing_args_rtx)) \ 890Sstevel@tonic-gate || GET_CODE (X) == ADDRESSOF) 900Sstevel@tonic-gate 910Sstevel@tonic-gate/* Much code operates on (low, high) pairs; the low value is an 920Sstevel@tonic-gate unsigned wide int, the high value a signed wide int. We 930Sstevel@tonic-gate occasionally need to sign extend from low to high as if low were a 940Sstevel@tonic-gate signed wide int. */ 950Sstevel@tonic-gate#define HWI_SIGN_EXTEND(low) \ 960Sstevel@tonic-gate ((((HOST_WIDE_INT) low) < 0) ? 
((HOST_WIDE_INT) -1) : ((HOST_WIDE_INT) 0)) 970Sstevel@tonic-gate 980Sstevel@tonic-gatestatic rtx neg_const_int PARAMS ((enum machine_mode, rtx)); 990Sstevel@tonic-gatestatic int simplify_plus_minus_op_data_cmp PARAMS ((const void *, 1000Sstevel@tonic-gate const void *)); 1010Sstevel@tonic-gatestatic rtx simplify_plus_minus PARAMS ((enum rtx_code, 1020Sstevel@tonic-gate enum machine_mode, rtx, rtx)); 1030Sstevel@tonic-gatestatic void check_fold_consts PARAMS ((PTR)); 1040Sstevel@tonic-gate#if ! defined (REAL_IS_NOT_DOUBLE) || defined (REAL_ARITHMETIC) 1050Sstevel@tonic-gatestatic void simplify_unary_real PARAMS ((PTR)); 1060Sstevel@tonic-gatestatic void simplify_binary_real PARAMS ((PTR)); 1070Sstevel@tonic-gate#endif 1080Sstevel@tonic-gatestatic void simplify_binary_is2orm1 PARAMS ((PTR)); 1090Sstevel@tonic-gate 1100Sstevel@tonic-gate 1110Sstevel@tonic-gate/* Negate a CONST_INT rtx, truncating (because a conversion from a 1120Sstevel@tonic-gate maximally negative number can overflow). */ 1130Sstevel@tonic-gatestatic rtx 1140Sstevel@tonic-gateneg_const_int (mode, i) 1150Sstevel@tonic-gate enum machine_mode mode; 1160Sstevel@tonic-gate rtx i; 1170Sstevel@tonic-gate{ 1180Sstevel@tonic-gate return GEN_INT (trunc_int_for_mode (- INTVAL (i), mode)); 1190Sstevel@tonic-gate} 1200Sstevel@tonic-gate 1210Sstevel@tonic-gate 1220Sstevel@tonic-gate/* Make a binary operation by properly ordering the operands and 1230Sstevel@tonic-gate seeing if the expression folds. */ 1240Sstevel@tonic-gate 1250Sstevel@tonic-gatertx 1260Sstevel@tonic-gatesimplify_gen_binary (code, mode, op0, op1) 1270Sstevel@tonic-gate enum rtx_code code; 1280Sstevel@tonic-gate enum machine_mode mode; 1290Sstevel@tonic-gate rtx op0, op1; 1300Sstevel@tonic-gate{ 1310Sstevel@tonic-gate rtx tem; 1320Sstevel@tonic-gate 1330Sstevel@tonic-gate /* Put complex operands first and constants second if commutative. 
*/ 1340Sstevel@tonic-gate if (GET_RTX_CLASS (code) == 'c' 1350Sstevel@tonic-gate && swap_commutative_operands_p (op0, op1)) 1360Sstevel@tonic-gate tem = op0, op0 = op1, op1 = tem; 1370Sstevel@tonic-gate 1380Sstevel@tonic-gate /* If this simplifies, do it. */ 1390Sstevel@tonic-gate tem = simplify_binary_operation (code, mode, op0, op1); 1400Sstevel@tonic-gate 1410Sstevel@tonic-gate if (tem) 1420Sstevel@tonic-gate return tem; 1430Sstevel@tonic-gate 1440Sstevel@tonic-gate /* Handle addition and subtraction of CONST_INT specially. Otherwise, 1450Sstevel@tonic-gate just form the operation. */ 1460Sstevel@tonic-gate 1470Sstevel@tonic-gate if (GET_CODE (op1) == CONST_INT 1480Sstevel@tonic-gate && GET_MODE (op0) != VOIDmode 1490Sstevel@tonic-gate && (code == PLUS || code == MINUS)) 1500Sstevel@tonic-gate { 1510Sstevel@tonic-gate if (code == MINUS) 1520Sstevel@tonic-gate op1 = neg_const_int (mode, op1); 1530Sstevel@tonic-gate return plus_constant (op0, INTVAL (op1)); 1540Sstevel@tonic-gate } 1550Sstevel@tonic-gate else 1560Sstevel@tonic-gate return gen_rtx_fmt_ee (code, mode, op0, op1); 1570Sstevel@tonic-gate} 1580Sstevel@tonic-gate 1590Sstevel@tonic-gate/* If X is a MEM referencing the constant pool, return the real value. 1600Sstevel@tonic-gate Otherwise return X. */ 1610Sstevel@tonic-gatertx 1620Sstevel@tonic-gateavoid_constant_pool_reference (x) 1630Sstevel@tonic-gate rtx x; 1640Sstevel@tonic-gate{ 1650Sstevel@tonic-gate rtx c, addr; 1660Sstevel@tonic-gate enum machine_mode cmode; 1670Sstevel@tonic-gate 1680Sstevel@tonic-gate if (GET_CODE (x) != MEM) 1690Sstevel@tonic-gate return x; 1700Sstevel@tonic-gate addr = XEXP (x, 0); 1710Sstevel@tonic-gate 1720Sstevel@tonic-gate if (GET_CODE (addr) != SYMBOL_REF 1730Sstevel@tonic-gate || ! 
CONSTANT_POOL_ADDRESS_P (addr)) 1740Sstevel@tonic-gate return x; 1750Sstevel@tonic-gate 1760Sstevel@tonic-gate c = get_pool_constant (addr); 1770Sstevel@tonic-gate cmode = get_pool_mode (addr); 1780Sstevel@tonic-gate 1790Sstevel@tonic-gate /* If we're accessing the constant in a different mode than it was 1800Sstevel@tonic-gate originally stored, attempt to fix that up via subreg simplifications. 1810Sstevel@tonic-gate If that fails we have no choice but to return the original memory. */ 1820Sstevel@tonic-gate if (cmode != GET_MODE (x)) 1830Sstevel@tonic-gate { 1840Sstevel@tonic-gate c = simplify_subreg (GET_MODE (x), c, cmode, 0); 1850Sstevel@tonic-gate return c ? c : x; 1860Sstevel@tonic-gate } 1870Sstevel@tonic-gate 1880Sstevel@tonic-gate return c; 1890Sstevel@tonic-gate} 1900Sstevel@tonic-gate 1910Sstevel@tonic-gate/* Make a unary operation by first seeing if it folds and otherwise making 1920Sstevel@tonic-gate the specified operation. */ 1930Sstevel@tonic-gate 194633Sgt29601rtx 1950Sstevel@tonic-gatesimplify_gen_unary (code, mode, op, op_mode) 1960Sstevel@tonic-gate enum rtx_code code; 1970Sstevel@tonic-gate enum machine_mode mode; 1980Sstevel@tonic-gate rtx op; 1990Sstevel@tonic-gate enum machine_mode op_mode; 2000Sstevel@tonic-gate{ 2010Sstevel@tonic-gate rtx tem; 2020Sstevel@tonic-gate 2030Sstevel@tonic-gate /* If this simplifies, use it. */ 2040Sstevel@tonic-gate if ((tem = simplify_unary_operation (code, mode, op, op_mode)) != 0) 2050Sstevel@tonic-gate return tem; 2060Sstevel@tonic-gate 2070Sstevel@tonic-gate return gen_rtx_fmt_e (code, mode, op); 2080Sstevel@tonic-gate} 2090Sstevel@tonic-gate 2100Sstevel@tonic-gate/* Likewise for ternary operations. 
*/ 2110Sstevel@tonic-gate 2120Sstevel@tonic-gatertx 2130Sstevel@tonic-gatesimplify_gen_ternary (code, mode, op0_mode, op0, op1, op2) 2140Sstevel@tonic-gate enum rtx_code code; 2150Sstevel@tonic-gate enum machine_mode mode, op0_mode; 2160Sstevel@tonic-gate rtx op0, op1, op2; 2170Sstevel@tonic-gate{ 2180Sstevel@tonic-gate rtx tem; 2190Sstevel@tonic-gate 2200Sstevel@tonic-gate /* If this simplifies, use it. */ 2210Sstevel@tonic-gate if (0 != (tem = simplify_ternary_operation (code, mode, op0_mode, 2220Sstevel@tonic-gate op0, op1, op2))) 2230Sstevel@tonic-gate return tem; 2240Sstevel@tonic-gate 2250Sstevel@tonic-gate return gen_rtx_fmt_eee (code, mode, op0, op1, op2); 2260Sstevel@tonic-gate} 2270Sstevel@tonic-gate 2280Sstevel@tonic-gate/* Likewise, for relational operations. 2290Sstevel@tonic-gate CMP_MODE specifies mode comparison is done in. 2300Sstevel@tonic-gate */ 2310Sstevel@tonic-gate 2320Sstevel@tonic-gatertx 2330Sstevel@tonic-gatesimplify_gen_relational (code, mode, cmp_mode, op0, op1) 2340Sstevel@tonic-gate enum rtx_code code; 2350Sstevel@tonic-gate enum machine_mode mode; 2360Sstevel@tonic-gate enum machine_mode cmp_mode; 2370Sstevel@tonic-gate rtx op0, op1; 2380Sstevel@tonic-gate{ 2390Sstevel@tonic-gate rtx tem; 2400Sstevel@tonic-gate 2410Sstevel@tonic-gate if ((tem = simplify_relational_operation (code, cmp_mode, op0, op1)) != 0) 2420Sstevel@tonic-gate return tem; 2430Sstevel@tonic-gate 2440Sstevel@tonic-gate /* Put complex operands first and constants second. */ 2450Sstevel@tonic-gate if (swap_commutative_operands_p (op0, op1)) 2460Sstevel@tonic-gate tem = op0, op0 = op1, op1 = tem, code = swap_condition (code); 2470Sstevel@tonic-gate 2480Sstevel@tonic-gate return gen_rtx_fmt_ee (code, mode, op0, op1); 2490Sstevel@tonic-gate} 2500Sstevel@tonic-gate 2510Sstevel@tonic-gate/* Replace all occurrences of OLD in X with NEW and try to simplify the 2520Sstevel@tonic-gate resulting RTX. Return a new RTX which is as simplified as possible. 
*/ 2530Sstevel@tonic-gate 2540Sstevel@tonic-gatertx 2550Sstevel@tonic-gatesimplify_replace_rtx (x, old, new) 2560Sstevel@tonic-gate rtx x; 2570Sstevel@tonic-gate rtx old; 2580Sstevel@tonic-gate rtx new; 2590Sstevel@tonic-gate{ 2600Sstevel@tonic-gate enum rtx_code code = GET_CODE (x); 2610Sstevel@tonic-gate enum machine_mode mode = GET_MODE (x); 2620Sstevel@tonic-gate 2630Sstevel@tonic-gate /* If X is OLD, return NEW. Otherwise, if this is an expression, try 2640Sstevel@tonic-gate to build a new expression substituting recursively. If we can't do 2650Sstevel@tonic-gate anything, return our input. */ 2660Sstevel@tonic-gate 2670Sstevel@tonic-gate if (x == old) 2680Sstevel@tonic-gate return new; 2690Sstevel@tonic-gate 2700Sstevel@tonic-gate switch (GET_RTX_CLASS (code)) 2710Sstevel@tonic-gate { 2720Sstevel@tonic-gate case '1': 2730Sstevel@tonic-gate { 2740Sstevel@tonic-gate enum machine_mode op_mode = GET_MODE (XEXP (x, 0)); 2750Sstevel@tonic-gate rtx op = (XEXP (x, 0) == old 2760Sstevel@tonic-gate ? new : simplify_replace_rtx (XEXP (x, 0), old, new)); 2770Sstevel@tonic-gate 2780Sstevel@tonic-gate return simplify_gen_unary (code, mode, op, op_mode); 2790Sstevel@tonic-gate } 2800Sstevel@tonic-gate 2810Sstevel@tonic-gate case '2': 2820Sstevel@tonic-gate case 'c': 2830Sstevel@tonic-gate return 2840Sstevel@tonic-gate simplify_gen_binary (code, mode, 2850Sstevel@tonic-gate simplify_replace_rtx (XEXP (x, 0), old, new), 2860Sstevel@tonic-gate simplify_replace_rtx (XEXP (x, 1), old, new)); 2870Sstevel@tonic-gate case '<': 2880Sstevel@tonic-gate { 2890Sstevel@tonic-gate enum machine_mode op_mode = (GET_MODE (XEXP (x, 0)) != VOIDmode 2900Sstevel@tonic-gate ? 
GET_MODE (XEXP (x, 0)) 2910Sstevel@tonic-gate : GET_MODE (XEXP (x, 1))); 2920Sstevel@tonic-gate rtx op0 = simplify_replace_rtx (XEXP (x, 0), old, new); 2930Sstevel@tonic-gate rtx op1 = simplify_replace_rtx (XEXP (x, 1), old, new); 2940Sstevel@tonic-gate 2950Sstevel@tonic-gate return 2960Sstevel@tonic-gate simplify_gen_relational (code, mode, 2970Sstevel@tonic-gate (op_mode != VOIDmode 2980Sstevel@tonic-gate ? op_mode 2990Sstevel@tonic-gate : GET_MODE (op0) != VOIDmode 3000Sstevel@tonic-gate ? GET_MODE (op0) 3010Sstevel@tonic-gate : GET_MODE (op1)), 3020Sstevel@tonic-gate op0, op1); 3030Sstevel@tonic-gate } 3040Sstevel@tonic-gate 3050Sstevel@tonic-gate case '3': 3060Sstevel@tonic-gate case 'b': 3070Sstevel@tonic-gate { 3080Sstevel@tonic-gate enum machine_mode op_mode = GET_MODE (XEXP (x, 0)); 3090Sstevel@tonic-gate rtx op0 = simplify_replace_rtx (XEXP (x, 0), old, new); 3100Sstevel@tonic-gate 3110Sstevel@tonic-gate return 3120Sstevel@tonic-gate simplify_gen_ternary (code, mode, 3130Sstevel@tonic-gate (op_mode != VOIDmode 3140Sstevel@tonic-gate ? op_mode 3150Sstevel@tonic-gate : GET_MODE (op0)), 3160Sstevel@tonic-gate op0, 3170Sstevel@tonic-gate simplify_replace_rtx (XEXP (x, 1), old, new), 3180Sstevel@tonic-gate simplify_replace_rtx (XEXP (x, 2), old, new)); 3190Sstevel@tonic-gate } 3200Sstevel@tonic-gate 3210Sstevel@tonic-gate case 'x': 3220Sstevel@tonic-gate /* The only case we try to handle is a SUBREG. 
*/ 3230Sstevel@tonic-gate if (code == SUBREG) 3240Sstevel@tonic-gate { 3250Sstevel@tonic-gate rtx exp; 3260Sstevel@tonic-gate exp = simplify_gen_subreg (GET_MODE (x), 3270Sstevel@tonic-gate simplify_replace_rtx (SUBREG_REG (x), 3280Sstevel@tonic-gate old, new), 3290Sstevel@tonic-gate GET_MODE (SUBREG_REG (x)), 3300Sstevel@tonic-gate SUBREG_BYTE (x)); 3310Sstevel@tonic-gate if (exp) 3320Sstevel@tonic-gate x = exp; 3330Sstevel@tonic-gate } 3340Sstevel@tonic-gate return x; 3350Sstevel@tonic-gate 3360Sstevel@tonic-gate default: 3370Sstevel@tonic-gate if (GET_CODE (x) == MEM) 3380Sstevel@tonic-gate return 3390Sstevel@tonic-gate replace_equiv_address_nv (x, 3400Sstevel@tonic-gate simplify_replace_rtx (XEXP (x, 0), 3410Sstevel@tonic-gate old, new)); 3420Sstevel@tonic-gate 3430Sstevel@tonic-gate return x; 3440Sstevel@tonic-gate } 3450Sstevel@tonic-gate return x; 3460Sstevel@tonic-gate} 3470Sstevel@tonic-gate 3480Sstevel@tonic-gate#if ! defined (REAL_IS_NOT_DOUBLE) || defined (REAL_ARITHMETIC) 3490Sstevel@tonic-gate/* Subroutine of simplify_unary_operation, called via do_float_handler. 3500Sstevel@tonic-gate Handles simplification of unary ops on floating point values. */ 3510Sstevel@tonic-gatestruct simplify_unary_real_args 3520Sstevel@tonic-gate{ 3530Sstevel@tonic-gate rtx operand; 3540Sstevel@tonic-gate rtx result; 3550Sstevel@tonic-gate enum machine_mode mode; 3560Sstevel@tonic-gate enum rtx_code code; 3570Sstevel@tonic-gate bool want_integer; 3580Sstevel@tonic-gate}; 3590Sstevel@tonic-gate#define REAL_VALUE_ABS(d_) \ 3600Sstevel@tonic-gate (REAL_VALUE_NEGATIVE (d_) ? 
REAL_VALUE_NEGATE (d_) : (d_)) 3610Sstevel@tonic-gate 3620Sstevel@tonic-gatestatic void 3630Sstevel@tonic-gatesimplify_unary_real (p) 3640Sstevel@tonic-gate PTR p; 3650Sstevel@tonic-gate{ 3660Sstevel@tonic-gate REAL_VALUE_TYPE d; 3670Sstevel@tonic-gate 3680Sstevel@tonic-gate struct simplify_unary_real_args *args = 3690Sstevel@tonic-gate (struct simplify_unary_real_args *) p; 3700Sstevel@tonic-gate 3710Sstevel@tonic-gate REAL_VALUE_FROM_CONST_DOUBLE (d, args->operand); 3720Sstevel@tonic-gate 3730Sstevel@tonic-gate if (args->want_integer) 3740Sstevel@tonic-gate { 3750Sstevel@tonic-gate HOST_WIDE_INT i; 3760Sstevel@tonic-gate 3770Sstevel@tonic-gate switch (args->code) 3780Sstevel@tonic-gate { 3790Sstevel@tonic-gate case FIX: i = REAL_VALUE_FIX (d); break; 3800Sstevel@tonic-gate case UNSIGNED_FIX: i = REAL_VALUE_UNSIGNED_FIX (d); break; 3810Sstevel@tonic-gate default: 3820Sstevel@tonic-gate abort (); 3830Sstevel@tonic-gate } 3840Sstevel@tonic-gate args->result = GEN_INT (trunc_int_for_mode (i, args->mode)); 3850Sstevel@tonic-gate } 3860Sstevel@tonic-gate else 3870Sstevel@tonic-gate { 3880Sstevel@tonic-gate switch (args->code) 3890Sstevel@tonic-gate { 3900Sstevel@tonic-gate case SQRT: 3910Sstevel@tonic-gate /* We don't attempt to optimize this. */ 3920Sstevel@tonic-gate args->result = 0; 3930Sstevel@tonic-gate return; 3940Sstevel@tonic-gate 3950Sstevel@tonic-gate case ABS: d = REAL_VALUE_ABS (d); break; 3960Sstevel@tonic-gate case NEG: d = REAL_VALUE_NEGATE (d); break; 3970Sstevel@tonic-gate case FLOAT_TRUNCATE: d = real_value_truncate (args->mode, d); break; 3980Sstevel@tonic-gate case FLOAT_EXTEND: /* All this does is change the mode. 
*/ break; 3990Sstevel@tonic-gate case FIX: d = REAL_VALUE_RNDZINT (d); break; 4000Sstevel@tonic-gate case UNSIGNED_FIX: d = REAL_VALUE_UNSIGNED_RNDZINT (d); break; 4010Sstevel@tonic-gate default: 4020Sstevel@tonic-gate abort (); 4030Sstevel@tonic-gate } 4040Sstevel@tonic-gate args->result = CONST_DOUBLE_FROM_REAL_VALUE (d, args->mode); 4050Sstevel@tonic-gate } 4060Sstevel@tonic-gate} 4070Sstevel@tonic-gate#endif 4080Sstevel@tonic-gate 4090Sstevel@tonic-gate/* Try to simplify a unary operation CODE whose output mode is to be 4100Sstevel@tonic-gate MODE with input operand OP whose mode was originally OP_MODE. 4110Sstevel@tonic-gate Return zero if no simplification can be made. */ 4120Sstevel@tonic-gatertx 4130Sstevel@tonic-gatesimplify_unary_operation (code, mode, op, op_mode) 4140Sstevel@tonic-gate enum rtx_code code; 4150Sstevel@tonic-gate enum machine_mode mode; 4160Sstevel@tonic-gate rtx op; 4170Sstevel@tonic-gate enum machine_mode op_mode; 4180Sstevel@tonic-gate{ 4190Sstevel@tonic-gate unsigned int width = GET_MODE_BITSIZE (mode); 4200Sstevel@tonic-gate rtx trueop = avoid_constant_pool_reference (op); 4210Sstevel@tonic-gate 4220Sstevel@tonic-gate /* The order of these tests is critical so that, for example, we don't 4230Sstevel@tonic-gate check the wrong mode (input vs. output) for a conversion operation, 4240Sstevel@tonic-gate such as FIX. At some point, this should be simplified. 
*/ 4250Sstevel@tonic-gate 4260Sstevel@tonic-gate#if !defined(REAL_IS_NOT_DOUBLE) || defined(REAL_ARITHMETIC) 4270Sstevel@tonic-gate 4280Sstevel@tonic-gate if (code == FLOAT && GET_MODE (trueop) == VOIDmode 4290Sstevel@tonic-gate && (GET_CODE (trueop) == CONST_DOUBLE || GET_CODE (trueop) == CONST_INT)) 4300Sstevel@tonic-gate { 4310Sstevel@tonic-gate HOST_WIDE_INT hv, lv; 4320Sstevel@tonic-gate REAL_VALUE_TYPE d; 4330Sstevel@tonic-gate 4340Sstevel@tonic-gate if (GET_CODE (trueop) == CONST_INT) 4350Sstevel@tonic-gate lv = INTVAL (trueop), hv = HWI_SIGN_EXTEND (lv); 4360Sstevel@tonic-gate else 4370Sstevel@tonic-gate lv = CONST_DOUBLE_LOW (trueop), hv = CONST_DOUBLE_HIGH (trueop); 4380Sstevel@tonic-gate 4390Sstevel@tonic-gate#ifdef REAL_ARITHMETIC 4400Sstevel@tonic-gate REAL_VALUE_FROM_INT (d, lv, hv, mode); 4410Sstevel@tonic-gate#else 4420Sstevel@tonic-gate if (hv < 0) 4430Sstevel@tonic-gate { 4440Sstevel@tonic-gate d = (double) (~ hv); 4450Sstevel@tonic-gate d *= ((double) ((HOST_WIDE_INT) 1 << (HOST_BITS_PER_WIDE_INT / 2)) 4460Sstevel@tonic-gate * (double) ((HOST_WIDE_INT) 1 << (HOST_BITS_PER_WIDE_INT / 2))); 4470Sstevel@tonic-gate d += (double) (unsigned HOST_WIDE_INT) (~ lv); 4480Sstevel@tonic-gate d = (- d - 1.0); 4490Sstevel@tonic-gate } 4500Sstevel@tonic-gate else 4510Sstevel@tonic-gate { 4520Sstevel@tonic-gate d = (double) hv; 4530Sstevel@tonic-gate d *= ((double) ((HOST_WIDE_INT) 1 << (HOST_BITS_PER_WIDE_INT / 2)) 4540Sstevel@tonic-gate * (double) ((HOST_WIDE_INT) 1 << (HOST_BITS_PER_WIDE_INT / 2))); 4550Sstevel@tonic-gate d += (double) (unsigned HOST_WIDE_INT) lv; 4560Sstevel@tonic-gate } 4570Sstevel@tonic-gate#endif /* REAL_ARITHMETIC */ 4580Sstevel@tonic-gate d = real_value_truncate (mode, d); 4590Sstevel@tonic-gate return CONST_DOUBLE_FROM_REAL_VALUE (d, mode); 4600Sstevel@tonic-gate } 4610Sstevel@tonic-gate else if (code == UNSIGNED_FLOAT && GET_MODE (trueop) == VOIDmode 4620Sstevel@tonic-gate && (GET_CODE (trueop) == CONST_DOUBLE 4630Sstevel@tonic-gate 
|| GET_CODE (trueop) == CONST_INT)) 4640Sstevel@tonic-gate { 4650Sstevel@tonic-gate HOST_WIDE_INT hv, lv; 4660Sstevel@tonic-gate REAL_VALUE_TYPE d; 4670Sstevel@tonic-gate 4680Sstevel@tonic-gate if (GET_CODE (trueop) == CONST_INT) 4690Sstevel@tonic-gate lv = INTVAL (trueop), hv = HWI_SIGN_EXTEND (lv); 4700Sstevel@tonic-gate else 4710Sstevel@tonic-gate lv = CONST_DOUBLE_LOW (trueop), hv = CONST_DOUBLE_HIGH (trueop); 4720Sstevel@tonic-gate 4730Sstevel@tonic-gate if (op_mode == VOIDmode) 4740Sstevel@tonic-gate { 4750Sstevel@tonic-gate /* We don't know how to interpret negative-looking numbers in 4760Sstevel@tonic-gate this case, so don't try to fold those. */ 4770Sstevel@tonic-gate if (hv < 0) 4780Sstevel@tonic-gate return 0; 4790Sstevel@tonic-gate } 4800Sstevel@tonic-gate else if (GET_MODE_BITSIZE (op_mode) >= HOST_BITS_PER_WIDE_INT * 2) 4810Sstevel@tonic-gate ; 4820Sstevel@tonic-gate else 4830Sstevel@tonic-gate hv = 0, lv &= GET_MODE_MASK (op_mode); 4840Sstevel@tonic-gate 4850Sstevel@tonic-gate#ifdef REAL_ARITHMETIC 4860Sstevel@tonic-gate REAL_VALUE_FROM_UNSIGNED_INT (d, lv, hv, mode); 4870Sstevel@tonic-gate#else 4880Sstevel@tonic-gate 4890Sstevel@tonic-gate d = (double) (unsigned HOST_WIDE_INT) hv; 4900Sstevel@tonic-gate d *= ((double) ((HOST_WIDE_INT) 1 << (HOST_BITS_PER_WIDE_INT / 2)) 4910Sstevel@tonic-gate * (double) ((HOST_WIDE_INT) 1 << (HOST_BITS_PER_WIDE_INT / 2))); 4920Sstevel@tonic-gate d += (double) (unsigned HOST_WIDE_INT) lv; 4930Sstevel@tonic-gate#endif /* REAL_ARITHMETIC */ 4940Sstevel@tonic-gate d = real_value_truncate (mode, d); 4950Sstevel@tonic-gate return CONST_DOUBLE_FROM_REAL_VALUE (d, mode); 4960Sstevel@tonic-gate } 4970Sstevel@tonic-gate#endif 4980Sstevel@tonic-gate 4990Sstevel@tonic-gate if (GET_CODE (trueop) == CONST_INT 5000Sstevel@tonic-gate && width <= HOST_BITS_PER_WIDE_INT && width > 0) 5010Sstevel@tonic-gate { 5020Sstevel@tonic-gate HOST_WIDE_INT arg0 = INTVAL (trueop); 5030Sstevel@tonic-gate HOST_WIDE_INT val; 5040Sstevel@tonic-gate 
5050Sstevel@tonic-gate switch (code) 5060Sstevel@tonic-gate { 5070Sstevel@tonic-gate case NOT: 5080Sstevel@tonic-gate val = ~ arg0; 5090Sstevel@tonic-gate break; 5100Sstevel@tonic-gate 5110Sstevel@tonic-gate case NEG: 5120Sstevel@tonic-gate val = - arg0; 5130Sstevel@tonic-gate break; 5140Sstevel@tonic-gate 5150Sstevel@tonic-gate case ABS: 5160Sstevel@tonic-gate val = (arg0 >= 0 ? arg0 : - arg0); 5170Sstevel@tonic-gate break; 5180Sstevel@tonic-gate 5190Sstevel@tonic-gate case FFS: 5200Sstevel@tonic-gate /* Don't use ffs here. Instead, get low order bit and then its 5210Sstevel@tonic-gate number. If arg0 is zero, this will return 0, as desired. */ 5220Sstevel@tonic-gate arg0 &= GET_MODE_MASK (mode); 5230Sstevel@tonic-gate val = exact_log2 (arg0 & (- arg0)) + 1; 5240Sstevel@tonic-gate break; 5250Sstevel@tonic-gate 5260Sstevel@tonic-gate case TRUNCATE: 5270Sstevel@tonic-gate val = arg0; 5280Sstevel@tonic-gate break; 5290Sstevel@tonic-gate 5300Sstevel@tonic-gate case ZERO_EXTEND: 5310Sstevel@tonic-gate if (op_mode == VOIDmode) 5320Sstevel@tonic-gate op_mode = mode; 5330Sstevel@tonic-gate if (GET_MODE_BITSIZE (op_mode) == HOST_BITS_PER_WIDE_INT) 5340Sstevel@tonic-gate { 5350Sstevel@tonic-gate /* If we were really extending the mode, 5360Sstevel@tonic-gate we would have to distinguish between zero-extension 5370Sstevel@tonic-gate and sign-extension. 
*/ 5380Sstevel@tonic-gate if (width != GET_MODE_BITSIZE (op_mode)) 5390Sstevel@tonic-gate abort (); 5400Sstevel@tonic-gate val = arg0; 5410Sstevel@tonic-gate } 5420Sstevel@tonic-gate else if (GET_MODE_BITSIZE (op_mode) < HOST_BITS_PER_WIDE_INT) 5430Sstevel@tonic-gate val = arg0 & ~((HOST_WIDE_INT) (-1) << GET_MODE_BITSIZE (op_mode)); 5440Sstevel@tonic-gate else 5450Sstevel@tonic-gate return 0; 5460Sstevel@tonic-gate break; 5470Sstevel@tonic-gate 5480Sstevel@tonic-gate case SIGN_EXTEND: 5490Sstevel@tonic-gate if (op_mode == VOIDmode) 5500Sstevel@tonic-gate op_mode = mode; 5510Sstevel@tonic-gate if (GET_MODE_BITSIZE (op_mode) == HOST_BITS_PER_WIDE_INT) 5520Sstevel@tonic-gate { 5530Sstevel@tonic-gate /* If we were really extending the mode, 5540Sstevel@tonic-gate we would have to distinguish between zero-extension 5550Sstevel@tonic-gate and sign-extension. */ 5560Sstevel@tonic-gate if (width != GET_MODE_BITSIZE (op_mode)) 5570Sstevel@tonic-gate abort (); 5580Sstevel@tonic-gate val = arg0; 5590Sstevel@tonic-gate } 5600Sstevel@tonic-gate else if (GET_MODE_BITSIZE (op_mode) < HOST_BITS_PER_WIDE_INT) 5610Sstevel@tonic-gate { 5620Sstevel@tonic-gate val 5630Sstevel@tonic-gate = arg0 & ~((HOST_WIDE_INT) (-1) << GET_MODE_BITSIZE (op_mode)); 5640Sstevel@tonic-gate if (val 5650Sstevel@tonic-gate & ((HOST_WIDE_INT) 1 << (GET_MODE_BITSIZE (op_mode) - 1))) 5660Sstevel@tonic-gate val -= (HOST_WIDE_INT) 1 << GET_MODE_BITSIZE (op_mode); 5670Sstevel@tonic-gate } 5680Sstevel@tonic-gate else 5690Sstevel@tonic-gate return 0; 5700Sstevel@tonic-gate break; 5710Sstevel@tonic-gate 5720Sstevel@tonic-gate case SQRT: 5730Sstevel@tonic-gate case FLOAT_EXTEND: 5740Sstevel@tonic-gate case FLOAT_TRUNCATE: 5750Sstevel@tonic-gate case SS_TRUNCATE: 5760Sstevel@tonic-gate case US_TRUNCATE: 5770Sstevel@tonic-gate return 0; 5780Sstevel@tonic-gate 5790Sstevel@tonic-gate default: 5800Sstevel@tonic-gate abort (); 5810Sstevel@tonic-gate } 5820Sstevel@tonic-gate 5830Sstevel@tonic-gate val = trunc_int_for_mode 
(val, mode); 5840Sstevel@tonic-gate 5850Sstevel@tonic-gate return GEN_INT (val); 5860Sstevel@tonic-gate } 5870Sstevel@tonic-gate 5880Sstevel@tonic-gate /* We can do some operations on integer CONST_DOUBLEs. Also allow 5890Sstevel@tonic-gate for a DImode operation on a CONST_INT. */ 5900Sstevel@tonic-gate else if (GET_MODE (trueop) == VOIDmode && width <= HOST_BITS_PER_INT * 2 5910Sstevel@tonic-gate && (GET_CODE (trueop) == CONST_DOUBLE 5920Sstevel@tonic-gate || GET_CODE (trueop) == CONST_INT)) 5930Sstevel@tonic-gate { 5940Sstevel@tonic-gate unsigned HOST_WIDE_INT l1, lv; 5950Sstevel@tonic-gate HOST_WIDE_INT h1, hv; 5960Sstevel@tonic-gate 5970Sstevel@tonic-gate if (GET_CODE (trueop) == CONST_DOUBLE) 5980Sstevel@tonic-gate l1 = CONST_DOUBLE_LOW (trueop), h1 = CONST_DOUBLE_HIGH (trueop); 5990Sstevel@tonic-gate else 6000Sstevel@tonic-gate l1 = INTVAL (trueop), h1 = HWI_SIGN_EXTEND (l1); 6010Sstevel@tonic-gate 6020Sstevel@tonic-gate switch (code) 6030Sstevel@tonic-gate { 6040Sstevel@tonic-gate case NOT: 6050Sstevel@tonic-gate lv = ~ l1; 6060Sstevel@tonic-gate hv = ~ h1; 6070Sstevel@tonic-gate break; 6080Sstevel@tonic-gate 6090Sstevel@tonic-gate case NEG: 6100Sstevel@tonic-gate neg_double (l1, h1, &lv, &hv); 6110Sstevel@tonic-gate break; 6120Sstevel@tonic-gate 6130Sstevel@tonic-gate case ABS: 6140Sstevel@tonic-gate if (h1 < 0) 6150Sstevel@tonic-gate neg_double (l1, h1, &lv, &hv); 6160Sstevel@tonic-gate else 6170Sstevel@tonic-gate lv = l1, hv = h1; 6180Sstevel@tonic-gate break; 6190Sstevel@tonic-gate 6200Sstevel@tonic-gate case FFS: 6210Sstevel@tonic-gate hv = 0; 6220Sstevel@tonic-gate if (l1 == 0) 6230Sstevel@tonic-gate lv = HOST_BITS_PER_WIDE_INT + exact_log2 (h1 & (-h1)) + 1; 6240Sstevel@tonic-gate else 6250Sstevel@tonic-gate lv = exact_log2 (l1 & (-l1)) + 1; 6260Sstevel@tonic-gate break; 6270Sstevel@tonic-gate 6280Sstevel@tonic-gate case TRUNCATE: 6290Sstevel@tonic-gate /* This is just a change-of-mode, so do nothing. 
*/ 6300Sstevel@tonic-gate lv = l1, hv = h1; 6310Sstevel@tonic-gate break; 6320Sstevel@tonic-gate 6330Sstevel@tonic-gate case ZERO_EXTEND: 6340Sstevel@tonic-gate if (op_mode == VOIDmode 6350Sstevel@tonic-gate || GET_MODE_BITSIZE (op_mode) > HOST_BITS_PER_WIDE_INT) 6360Sstevel@tonic-gate return 0; 6370Sstevel@tonic-gate 6380Sstevel@tonic-gate hv = 0; 6390Sstevel@tonic-gate lv = l1 & GET_MODE_MASK (op_mode); 6400Sstevel@tonic-gate break; 6410Sstevel@tonic-gate 6420Sstevel@tonic-gate case SIGN_EXTEND: 6430Sstevel@tonic-gate if (op_mode == VOIDmode 6440Sstevel@tonic-gate || GET_MODE_BITSIZE (op_mode) > HOST_BITS_PER_WIDE_INT) 6450Sstevel@tonic-gate return 0; 6460Sstevel@tonic-gate else 6470Sstevel@tonic-gate { 6480Sstevel@tonic-gate lv = l1 & GET_MODE_MASK (op_mode); 6490Sstevel@tonic-gate if (GET_MODE_BITSIZE (op_mode) < HOST_BITS_PER_WIDE_INT 6500Sstevel@tonic-gate && (lv & ((HOST_WIDE_INT) 1 6510Sstevel@tonic-gate << (GET_MODE_BITSIZE (op_mode) - 1))) != 0) 6520Sstevel@tonic-gate lv -= (HOST_WIDE_INT) 1 << GET_MODE_BITSIZE (op_mode); 6530Sstevel@tonic-gate 6540Sstevel@tonic-gate hv = HWI_SIGN_EXTEND (lv); 6550Sstevel@tonic-gate } 6560Sstevel@tonic-gate break; 6570Sstevel@tonic-gate 6580Sstevel@tonic-gate case SQRT: 6590Sstevel@tonic-gate return 0; 6600Sstevel@tonic-gate 6610Sstevel@tonic-gate default: 6620Sstevel@tonic-gate return 0; 6630Sstevel@tonic-gate } 6640Sstevel@tonic-gate 6650Sstevel@tonic-gate return immed_double_const (lv, hv, mode); 6660Sstevel@tonic-gate } 6670Sstevel@tonic-gate 6680Sstevel@tonic-gate#if ! 
defined (REAL_IS_NOT_DOUBLE) || defined (REAL_ARITHMETIC) 6690Sstevel@tonic-gate else if (GET_CODE (trueop) == CONST_DOUBLE 6700Sstevel@tonic-gate && GET_MODE_CLASS (mode) == MODE_FLOAT) 6710Sstevel@tonic-gate { 6720Sstevel@tonic-gate struct simplify_unary_real_args args; 6730Sstevel@tonic-gate args.operand = trueop; 6740Sstevel@tonic-gate args.mode = mode; 6750Sstevel@tonic-gate args.code = code; 6760Sstevel@tonic-gate args.want_integer = false; 6770Sstevel@tonic-gate 6780Sstevel@tonic-gate if (do_float_handler (simplify_unary_real, (PTR) &args)) 6790Sstevel@tonic-gate return args.result; 6800Sstevel@tonic-gate 6810Sstevel@tonic-gate return 0; 6820Sstevel@tonic-gate } 6830Sstevel@tonic-gate 6840Sstevel@tonic-gate else if (GET_CODE (trueop) == CONST_DOUBLE 6850Sstevel@tonic-gate && GET_MODE_CLASS (GET_MODE (trueop)) == MODE_FLOAT 6860Sstevel@tonic-gate && GET_MODE_CLASS (mode) == MODE_INT 6870Sstevel@tonic-gate && width <= HOST_BITS_PER_WIDE_INT && width > 0) 6880Sstevel@tonic-gate { 6890Sstevel@tonic-gate struct simplify_unary_real_args args; 6900Sstevel@tonic-gate args.operand = trueop; 6910Sstevel@tonic-gate args.mode = mode; 6920Sstevel@tonic-gate args.code = code; 6930Sstevel@tonic-gate args.want_integer = true; 6940Sstevel@tonic-gate 6950Sstevel@tonic-gate if (do_float_handler (simplify_unary_real, (PTR) &args)) 6960Sstevel@tonic-gate return args.result; 6970Sstevel@tonic-gate 6980Sstevel@tonic-gate return 0; 6990Sstevel@tonic-gate } 7000Sstevel@tonic-gate#endif 7010Sstevel@tonic-gate /* This was formerly used only for non-IEEE float. 7020Sstevel@tonic-gate eggert@twinsun.com says it is safe for IEEE also. */ 7030Sstevel@tonic-gate else 7040Sstevel@tonic-gate { 7050Sstevel@tonic-gate enum rtx_code reversed; 7060Sstevel@tonic-gate /* There are some simplifications we can do even if the operands 7070Sstevel@tonic-gate aren't constant. 
*/ 7080Sstevel@tonic-gate switch (code) 7090Sstevel@tonic-gate { 7100Sstevel@tonic-gate case NOT: 7110Sstevel@tonic-gate /* (not (not X)) == X. */ 7120Sstevel@tonic-gate if (GET_CODE (op) == NOT) 7130Sstevel@tonic-gate return XEXP (op, 0); 7140Sstevel@tonic-gate 7150Sstevel@tonic-gate /* (not (eq X Y)) == (ne X Y), etc. */ 7160Sstevel@tonic-gate if (mode == BImode && GET_RTX_CLASS (GET_CODE (op)) == '<' 7170Sstevel@tonic-gate && ((reversed = reversed_comparison_code (op, NULL_RTX)) 7180Sstevel@tonic-gate != UNKNOWN)) 7190Sstevel@tonic-gate return gen_rtx_fmt_ee (reversed, 7200Sstevel@tonic-gate op_mode, XEXP (op, 0), XEXP (op, 1)); 7210Sstevel@tonic-gate break; 7220Sstevel@tonic-gate 7230Sstevel@tonic-gate case NEG: 7240Sstevel@tonic-gate /* (neg (neg X)) == X. */ 7250Sstevel@tonic-gate if (GET_CODE (op) == NEG) 7260Sstevel@tonic-gate return XEXP (op, 0); 7270Sstevel@tonic-gate break; 7280Sstevel@tonic-gate 7290Sstevel@tonic-gate case SIGN_EXTEND: 7300Sstevel@tonic-gate /* (sign_extend (truncate (minus (label_ref L1) (label_ref L2)))) 7310Sstevel@tonic-gate becomes just the MINUS if its mode is MODE. This allows 7320Sstevel@tonic-gate folding switch statements on machines using casesi (such as 7330Sstevel@tonic-gate the VAX). */ 7340Sstevel@tonic-gate if (GET_CODE (op) == TRUNCATE 7350Sstevel@tonic-gate && GET_MODE (XEXP (op, 0)) == mode 7360Sstevel@tonic-gate && GET_CODE (XEXP (op, 0)) == MINUS 7370Sstevel@tonic-gate && GET_CODE (XEXP (XEXP (op, 0), 0)) == LABEL_REF 7380Sstevel@tonic-gate && GET_CODE (XEXP (XEXP (op, 0), 1)) == LABEL_REF) 7390Sstevel@tonic-gate return XEXP (op, 0); 7400Sstevel@tonic-gate 7410Sstevel@tonic-gate#if defined(POINTERS_EXTEND_UNSIGNED) && !defined(HAVE_ptr_extend) 7420Sstevel@tonic-gate if (! 
POINTERS_EXTEND_UNSIGNED 7430Sstevel@tonic-gate && mode == Pmode && GET_MODE (op) == ptr_mode 7440Sstevel@tonic-gate && (CONSTANT_P (op) 7450Sstevel@tonic-gate || (GET_CODE (op) == SUBREG 7460Sstevel@tonic-gate && GET_CODE (SUBREG_REG (op)) == REG 7470Sstevel@tonic-gate && REG_POINTER (SUBREG_REG (op)) 7480Sstevel@tonic-gate && GET_MODE (SUBREG_REG (op)) == Pmode))) 7490Sstevel@tonic-gate return convert_memory_address (Pmode, op); 7500Sstevel@tonic-gate#endif 7510Sstevel@tonic-gate break; 7520Sstevel@tonic-gate 7530Sstevel@tonic-gate#if defined(POINTERS_EXTEND_UNSIGNED) && !defined(HAVE_ptr_extend) 7540Sstevel@tonic-gate case ZERO_EXTEND: 7550Sstevel@tonic-gate if (POINTERS_EXTEND_UNSIGNED > 0 7560Sstevel@tonic-gate && mode == Pmode && GET_MODE (op) == ptr_mode 7570Sstevel@tonic-gate && (CONSTANT_P (op) 7580Sstevel@tonic-gate || (GET_CODE (op) == SUBREG 7590Sstevel@tonic-gate && GET_CODE (SUBREG_REG (op)) == REG 7600Sstevel@tonic-gate && REG_POINTER (SUBREG_REG (op)) 7610Sstevel@tonic-gate && GET_MODE (SUBREG_REG (op)) == Pmode))) 7620Sstevel@tonic-gate return convert_memory_address (Pmode, op); 7630Sstevel@tonic-gate break; 7640Sstevel@tonic-gate#endif 7650Sstevel@tonic-gate 7660Sstevel@tonic-gate default: 7670Sstevel@tonic-gate break; 7680Sstevel@tonic-gate } 7690Sstevel@tonic-gate 7700Sstevel@tonic-gate return 0; 7710Sstevel@tonic-gate } 7720Sstevel@tonic-gate} 7730Sstevel@tonic-gate 7740Sstevel@tonic-gate#if ! defined (REAL_IS_NOT_DOUBLE) || defined (REAL_ARITHMETIC) 7750Sstevel@tonic-gate/* Subroutine of simplify_binary_operation, called via do_float_handler. 7760Sstevel@tonic-gate Handles simplification of binary ops on floating point values. 
*/ 7770Sstevel@tonic-gatestruct simplify_binary_real_args 7780Sstevel@tonic-gate{ 7790Sstevel@tonic-gate rtx trueop0, trueop1; 7800Sstevel@tonic-gate rtx result; 7810Sstevel@tonic-gate enum rtx_code code; 7820Sstevel@tonic-gate enum machine_mode mode; 7830Sstevel@tonic-gate}; 7840Sstevel@tonic-gate 7850Sstevel@tonic-gatestatic void 7860Sstevel@tonic-gatesimplify_binary_real (p) 7870Sstevel@tonic-gate PTR p; 7880Sstevel@tonic-gate{ 7890Sstevel@tonic-gate REAL_VALUE_TYPE f0, f1, value; 7900Sstevel@tonic-gate struct simplify_binary_real_args *args = 7910Sstevel@tonic-gate (struct simplify_binary_real_args *) p; 7920Sstevel@tonic-gate 7930Sstevel@tonic-gate REAL_VALUE_FROM_CONST_DOUBLE (f0, args->trueop0); 7940Sstevel@tonic-gate REAL_VALUE_FROM_CONST_DOUBLE (f1, args->trueop1); 7950Sstevel@tonic-gate f0 = real_value_truncate (args->mode, f0); 7960Sstevel@tonic-gate f1 = real_value_truncate (args->mode, f1); 7970Sstevel@tonic-gate 7980Sstevel@tonic-gate#ifdef REAL_ARITHMETIC 7990Sstevel@tonic-gate#ifndef REAL_INFINITY 8000Sstevel@tonic-gate if (args->code == DIV && REAL_VALUES_EQUAL (f1, dconst0)) 8010Sstevel@tonic-gate { 8020Sstevel@tonic-gate args->result = 0; 8030Sstevel@tonic-gate return; 8040Sstevel@tonic-gate } 8050Sstevel@tonic-gate#endif 8060Sstevel@tonic-gate REAL_ARITHMETIC (value, rtx_to_tree_code (args->code), f0, f1); 8070Sstevel@tonic-gate#else 8080Sstevel@tonic-gate switch (args->code) 8090Sstevel@tonic-gate { 8100Sstevel@tonic-gate case PLUS: 8110Sstevel@tonic-gate value = f0 + f1; 8120Sstevel@tonic-gate break; 8130Sstevel@tonic-gate case MINUS: 8140Sstevel@tonic-gate value = f0 - f1; 8150Sstevel@tonic-gate break; 8160Sstevel@tonic-gate case MULT: 8170Sstevel@tonic-gate value = f0 * f1; 8180Sstevel@tonic-gate break; 8190Sstevel@tonic-gate case DIV: 8200Sstevel@tonic-gate#ifndef REAL_INFINITY 8210Sstevel@tonic-gate if (f1 == 0) 8220Sstevel@tonic-gate return 0; 8230Sstevel@tonic-gate#endif 8240Sstevel@tonic-gate value = f0 / f1; 8250Sstevel@tonic-gate 
break; 8260Sstevel@tonic-gate case SMIN: 8270Sstevel@tonic-gate value = MIN (f0, f1); 8280Sstevel@tonic-gate break; 8290Sstevel@tonic-gate case SMAX: 8300Sstevel@tonic-gate value = MAX (f0, f1); 8310Sstevel@tonic-gate break; 8320Sstevel@tonic-gate default: 8330Sstevel@tonic-gate abort (); 8340Sstevel@tonic-gate } 8350Sstevel@tonic-gate#endif 8360Sstevel@tonic-gate 8370Sstevel@tonic-gate value = real_value_truncate (args->mode, value); 8380Sstevel@tonic-gate args->result = CONST_DOUBLE_FROM_REAL_VALUE (value, args->mode); 8390Sstevel@tonic-gate} 8400Sstevel@tonic-gate#endif 8410Sstevel@tonic-gate 8420Sstevel@tonic-gate/* Another subroutine called via do_float_handler. This one tests 8430Sstevel@tonic-gate the floating point value given against 2. and -1. */ 8440Sstevel@tonic-gatestruct simplify_binary_is2orm1_args 8450Sstevel@tonic-gate{ 8460Sstevel@tonic-gate rtx value; 8470Sstevel@tonic-gate bool is_2; 8480Sstevel@tonic-gate bool is_m1; 8490Sstevel@tonic-gate}; 8500Sstevel@tonic-gate 8510Sstevel@tonic-gatestatic void 8520Sstevel@tonic-gatesimplify_binary_is2orm1 (p) 8530Sstevel@tonic-gate PTR p; 8540Sstevel@tonic-gate{ 8550Sstevel@tonic-gate REAL_VALUE_TYPE d; 8560Sstevel@tonic-gate struct simplify_binary_is2orm1_args *args = 8570Sstevel@tonic-gate (struct simplify_binary_is2orm1_args *) p; 8580Sstevel@tonic-gate 8590Sstevel@tonic-gate REAL_VALUE_FROM_CONST_DOUBLE (d, args->value); 8600Sstevel@tonic-gate args->is_2 = REAL_VALUES_EQUAL (d, dconst2); 8610Sstevel@tonic-gate args->is_m1 = REAL_VALUES_EQUAL (d, dconstm1); 8620Sstevel@tonic-gate} 8630Sstevel@tonic-gate 8640Sstevel@tonic-gate/* Simplify a binary operation CODE with result mode MODE, operating on OP0 8650Sstevel@tonic-gate and OP1. Return 0 if no simplification is possible. 8660Sstevel@tonic-gate 8670Sstevel@tonic-gate Don't use this for relational operations such as EQ or LT. 8680Sstevel@tonic-gate Use simplify_relational_operation instead. 
*/ 8690Sstevel@tonic-gatertx 8700Sstevel@tonic-gatesimplify_binary_operation (code, mode, op0, op1) 8710Sstevel@tonic-gate enum rtx_code code; 8720Sstevel@tonic-gate enum machine_mode mode; 8730Sstevel@tonic-gate rtx op0, op1; 8740Sstevel@tonic-gate{ 8750Sstevel@tonic-gate HOST_WIDE_INT arg0, arg1, arg0s, arg1s; 8760Sstevel@tonic-gate HOST_WIDE_INT val; 8770Sstevel@tonic-gate unsigned int width = GET_MODE_BITSIZE (mode); 8780Sstevel@tonic-gate rtx tem; 8790Sstevel@tonic-gate rtx trueop0 = avoid_constant_pool_reference (op0); 8800Sstevel@tonic-gate rtx trueop1 = avoid_constant_pool_reference (op1); 8810Sstevel@tonic-gate 8820Sstevel@tonic-gate /* Relational operations don't work here. We must know the mode 8830Sstevel@tonic-gate of the operands in order to do the comparison correctly. 8840Sstevel@tonic-gate Assuming a full word can give incorrect results. 8850Sstevel@tonic-gate Consider comparing 128 with -128 in QImode. */ 8860Sstevel@tonic-gate 8870Sstevel@tonic-gate if (GET_RTX_CLASS (code) == '<') 8880Sstevel@tonic-gate abort (); 8890Sstevel@tonic-gate 8900Sstevel@tonic-gate /* Make sure the constant is second. */ 8910Sstevel@tonic-gate if (GET_RTX_CLASS (code) == 'c' 8920Sstevel@tonic-gate && swap_commutative_operands_p (trueop0, trueop1)) 8930Sstevel@tonic-gate { 8940Sstevel@tonic-gate tem = op0, op0 = op1, op1 = tem; 8950Sstevel@tonic-gate tem = trueop0, trueop0 = trueop1, trueop1 = tem; 8960Sstevel@tonic-gate } 8970Sstevel@tonic-gate 8980Sstevel@tonic-gate#if ! 
defined (REAL_IS_NOT_DOUBLE) || defined (REAL_ARITHMETIC) 8990Sstevel@tonic-gate if (GET_MODE_CLASS (mode) == MODE_FLOAT 9000Sstevel@tonic-gate && GET_CODE (trueop0) == CONST_DOUBLE 9010Sstevel@tonic-gate && GET_CODE (trueop1) == CONST_DOUBLE 9020Sstevel@tonic-gate && mode == GET_MODE (op0) && mode == GET_MODE (op1)) 9030Sstevel@tonic-gate { 9040Sstevel@tonic-gate struct simplify_binary_real_args args; 9050Sstevel@tonic-gate args.trueop0 = trueop0; 9060Sstevel@tonic-gate args.trueop1 = trueop1; 9070Sstevel@tonic-gate args.mode = mode; 9080Sstevel@tonic-gate args.code = code; 9090Sstevel@tonic-gate 9100Sstevel@tonic-gate if (do_float_handler (simplify_binary_real, (PTR) &args)) 9110Sstevel@tonic-gate return args.result; 9120Sstevel@tonic-gate return 0; 9130Sstevel@tonic-gate } 9140Sstevel@tonic-gate#endif /* not REAL_IS_NOT_DOUBLE, or REAL_ARITHMETIC */ 9150Sstevel@tonic-gate 9160Sstevel@tonic-gate /* We can fold some multi-word operations. */ 9170Sstevel@tonic-gate if (GET_MODE_CLASS (mode) == MODE_INT 9180Sstevel@tonic-gate && width == HOST_BITS_PER_WIDE_INT * 2 9190Sstevel@tonic-gate && (GET_CODE (trueop0) == CONST_DOUBLE 9200Sstevel@tonic-gate || GET_CODE (trueop0) == CONST_INT) 9210Sstevel@tonic-gate && (GET_CODE (trueop1) == CONST_DOUBLE 9220Sstevel@tonic-gate || GET_CODE (trueop1) == CONST_INT)) 9230Sstevel@tonic-gate { 9240Sstevel@tonic-gate unsigned HOST_WIDE_INT l1, l2, lv; 9250Sstevel@tonic-gate HOST_WIDE_INT h1, h2, hv; 9260Sstevel@tonic-gate 9270Sstevel@tonic-gate if (GET_CODE (trueop0) == CONST_DOUBLE) 9280Sstevel@tonic-gate l1 = CONST_DOUBLE_LOW (trueop0), h1 = CONST_DOUBLE_HIGH (trueop0); 9290Sstevel@tonic-gate else 9300Sstevel@tonic-gate l1 = INTVAL (trueop0), h1 = HWI_SIGN_EXTEND (l1); 9310Sstevel@tonic-gate 9320Sstevel@tonic-gate if (GET_CODE (trueop1) == CONST_DOUBLE) 9330Sstevel@tonic-gate l2 = CONST_DOUBLE_LOW (trueop1), h2 = CONST_DOUBLE_HIGH (trueop1); 9340Sstevel@tonic-gate else 9350Sstevel@tonic-gate l2 = INTVAL (trueop1), h2 = 
HWI_SIGN_EXTEND (l2); 9360Sstevel@tonic-gate 9370Sstevel@tonic-gate switch (code) 9380Sstevel@tonic-gate { 9390Sstevel@tonic-gate case MINUS: 9400Sstevel@tonic-gate /* A - B == A + (-B). */ 9410Sstevel@tonic-gate neg_double (l2, h2, &lv, &hv); 9420Sstevel@tonic-gate l2 = lv, h2 = hv; 9430Sstevel@tonic-gate 9440Sstevel@tonic-gate /* .. fall through ... */ 9450Sstevel@tonic-gate 9460Sstevel@tonic-gate case PLUS: 9470Sstevel@tonic-gate add_double (l1, h1, l2, h2, &lv, &hv); 9480Sstevel@tonic-gate break; 9490Sstevel@tonic-gate 9500Sstevel@tonic-gate case MULT: 9510Sstevel@tonic-gate mul_double (l1, h1, l2, h2, &lv, &hv); 9520Sstevel@tonic-gate break; 9530Sstevel@tonic-gate 9540Sstevel@tonic-gate case DIV: case MOD: case UDIV: case UMOD: 9550Sstevel@tonic-gate /* We'd need to include tree.h to do this and it doesn't seem worth 9560Sstevel@tonic-gate it. */ 9570Sstevel@tonic-gate return 0; 9580Sstevel@tonic-gate 9590Sstevel@tonic-gate case AND: 9600Sstevel@tonic-gate lv = l1 & l2, hv = h1 & h2; 9610Sstevel@tonic-gate break; 9620Sstevel@tonic-gate 9630Sstevel@tonic-gate case IOR: 9640Sstevel@tonic-gate lv = l1 | l2, hv = h1 | h2; 9650Sstevel@tonic-gate break; 9660Sstevel@tonic-gate 9670Sstevel@tonic-gate case XOR: 9680Sstevel@tonic-gate lv = l1 ^ l2, hv = h1 ^ h2; 9690Sstevel@tonic-gate break; 9700Sstevel@tonic-gate 9710Sstevel@tonic-gate case SMIN: 9720Sstevel@tonic-gate if (h1 < h2 9730Sstevel@tonic-gate || (h1 == h2 9740Sstevel@tonic-gate && ((unsigned HOST_WIDE_INT) l1 9750Sstevel@tonic-gate < (unsigned HOST_WIDE_INT) l2))) 9760Sstevel@tonic-gate lv = l1, hv = h1; 9770Sstevel@tonic-gate else 9780Sstevel@tonic-gate lv = l2, hv = h2; 9790Sstevel@tonic-gate break; 9800Sstevel@tonic-gate 9810Sstevel@tonic-gate case SMAX: 9820Sstevel@tonic-gate if (h1 > h2 9830Sstevel@tonic-gate || (h1 == h2 9840Sstevel@tonic-gate && ((unsigned HOST_WIDE_INT) l1 9850Sstevel@tonic-gate > (unsigned HOST_WIDE_INT) l2))) 9860Sstevel@tonic-gate lv = l1, hv = h1; 9870Sstevel@tonic-gate else 
9880Sstevel@tonic-gate lv = l2, hv = h2; 9890Sstevel@tonic-gate break; 9900Sstevel@tonic-gate 9910Sstevel@tonic-gate case UMIN: 9920Sstevel@tonic-gate if ((unsigned HOST_WIDE_INT) h1 < (unsigned HOST_WIDE_INT) h2 9930Sstevel@tonic-gate || (h1 == h2 9940Sstevel@tonic-gate && ((unsigned HOST_WIDE_INT) l1 9950Sstevel@tonic-gate < (unsigned HOST_WIDE_INT) l2))) 9960Sstevel@tonic-gate lv = l1, hv = h1; 9970Sstevel@tonic-gate else 9980Sstevel@tonic-gate lv = l2, hv = h2; 9990Sstevel@tonic-gate break; 10000Sstevel@tonic-gate 10010Sstevel@tonic-gate case UMAX: 10020Sstevel@tonic-gate if ((unsigned HOST_WIDE_INT) h1 > (unsigned HOST_WIDE_INT) h2 10030Sstevel@tonic-gate || (h1 == h2 10040Sstevel@tonic-gate && ((unsigned HOST_WIDE_INT) l1 10050Sstevel@tonic-gate > (unsigned HOST_WIDE_INT) l2))) 10060Sstevel@tonic-gate lv = l1, hv = h1; 10070Sstevel@tonic-gate else 10080Sstevel@tonic-gate lv = l2, hv = h2; 10090Sstevel@tonic-gate break; 10100Sstevel@tonic-gate 10110Sstevel@tonic-gate case LSHIFTRT: case ASHIFTRT: 10120Sstevel@tonic-gate case ASHIFT: 10130Sstevel@tonic-gate case ROTATE: case ROTATERT: 10140Sstevel@tonic-gate#ifdef SHIFT_COUNT_TRUNCATED 10150Sstevel@tonic-gate if (SHIFT_COUNT_TRUNCATED) 10160Sstevel@tonic-gate l2 &= (GET_MODE_BITSIZE (mode) - 1), h2 = 0; 10170Sstevel@tonic-gate#endif 10180Sstevel@tonic-gate 10190Sstevel@tonic-gate if (h2 != 0 || l2 >= GET_MODE_BITSIZE (mode)) 10200Sstevel@tonic-gate return 0; 10210Sstevel@tonic-gate 10220Sstevel@tonic-gate if (code == LSHIFTRT || code == ASHIFTRT) 10230Sstevel@tonic-gate rshift_double (l1, h1, l2, GET_MODE_BITSIZE (mode), &lv, &hv, 10240Sstevel@tonic-gate code == ASHIFTRT); 10250Sstevel@tonic-gate else if (code == ASHIFT) 10260Sstevel@tonic-gate lshift_double (l1, h1, l2, GET_MODE_BITSIZE (mode), &lv, &hv, 1); 10270Sstevel@tonic-gate else if (code == ROTATE) 10280Sstevel@tonic-gate lrotate_double (l1, h1, l2, GET_MODE_BITSIZE (mode), &lv, &hv); 10290Sstevel@tonic-gate else /* code == ROTATERT */ 
10300Sstevel@tonic-gate rrotate_double (l1, h1, l2, GET_MODE_BITSIZE (mode), &lv, &hv); 10310Sstevel@tonic-gate break; 10320Sstevel@tonic-gate 10330Sstevel@tonic-gate default: 10340Sstevel@tonic-gate return 0; 10350Sstevel@tonic-gate } 10360Sstevel@tonic-gate 10370Sstevel@tonic-gate return immed_double_const (lv, hv, mode); 10380Sstevel@tonic-gate } 10390Sstevel@tonic-gate 10400Sstevel@tonic-gate if (GET_CODE (op0) != CONST_INT || GET_CODE (op1) != CONST_INT 10410Sstevel@tonic-gate || width > HOST_BITS_PER_WIDE_INT || width == 0) 10420Sstevel@tonic-gate { 10430Sstevel@tonic-gate /* Even if we can't compute a constant result, 10440Sstevel@tonic-gate there are some cases worth simplifying. */ 10450Sstevel@tonic-gate 10460Sstevel@tonic-gate switch (code) 10470Sstevel@tonic-gate { 10480Sstevel@tonic-gate case PLUS: 10490Sstevel@tonic-gate /* In IEEE floating point, x+0 is not the same as x. Similarly 10500Sstevel@tonic-gate for the other optimizations below. */ 10510Sstevel@tonic-gate if (TARGET_FLOAT_FORMAT == IEEE_FLOAT_FORMAT 10520Sstevel@tonic-gate && FLOAT_MODE_P (mode) && ! 
flag_unsafe_math_optimizations) 10530Sstevel@tonic-gate break; 10540Sstevel@tonic-gate 10550Sstevel@tonic-gate if (trueop1 == CONST0_RTX (mode)) 10560Sstevel@tonic-gate return op0; 10570Sstevel@tonic-gate 10580Sstevel@tonic-gate /* ((-a) + b) -> (b - a) and similarly for (a + (-b)) */ 10590Sstevel@tonic-gate if (GET_CODE (op0) == NEG) 10600Sstevel@tonic-gate return simplify_gen_binary (MINUS, mode, op1, XEXP (op0, 0)); 10610Sstevel@tonic-gate else if (GET_CODE (op1) == NEG) 10620Sstevel@tonic-gate return simplify_gen_binary (MINUS, mode, op0, XEXP (op1, 0)); 10630Sstevel@tonic-gate 10640Sstevel@tonic-gate /* (~a) + 1 -> -a */ 10650Sstevel@tonic-gate if (INTEGRAL_MODE_P (mode) 10660Sstevel@tonic-gate && GET_CODE (op0) == NOT 10670Sstevel@tonic-gate && trueop1 == const1_rtx) 10680Sstevel@tonic-gate return gen_rtx_NEG (mode, XEXP (op0, 0)); 10690Sstevel@tonic-gate 10700Sstevel@tonic-gate /* Handle both-operands-constant cases. We can only add 10710Sstevel@tonic-gate CONST_INTs to constants since the sum of relocatable symbols 10720Sstevel@tonic-gate can't be handled by most assemblers. Don't add CONST_INT 10730Sstevel@tonic-gate to CONST_INT since overflow won't be computed properly if wider 10740Sstevel@tonic-gate than HOST_BITS_PER_WIDE_INT. */ 10750Sstevel@tonic-gate 10760Sstevel@tonic-gate if (CONSTANT_P (op0) && GET_MODE (op0) != VOIDmode 10770Sstevel@tonic-gate && GET_CODE (op1) == CONST_INT) 10780Sstevel@tonic-gate return plus_constant (op0, INTVAL (op1)); 10790Sstevel@tonic-gate else if (CONSTANT_P (op1) && GET_MODE (op1) != VOIDmode 10800Sstevel@tonic-gate && GET_CODE (op0) == CONST_INT) 10810Sstevel@tonic-gate return plus_constant (op1, INTVAL (op0)); 10820Sstevel@tonic-gate 10830Sstevel@tonic-gate /* See if this is something like X * C - X or vice versa or 10840Sstevel@tonic-gate if the multiplication is written as a shift. 
If so, we can 10850Sstevel@tonic-gate distribute and make a new multiply, shift, or maybe just 10860Sstevel@tonic-gate have X (if C is 2 in the example above). But don't make 10870Sstevel@tonic-gate real multiply if we didn't have one before. */ 10880Sstevel@tonic-gate 10890Sstevel@tonic-gate if (! FLOAT_MODE_P (mode)) 10900Sstevel@tonic-gate { 10910Sstevel@tonic-gate HOST_WIDE_INT coeff0 = 1, coeff1 = 1; 10920Sstevel@tonic-gate rtx lhs = op0, rhs = op1; 10930Sstevel@tonic-gate int had_mult = 0; 10940Sstevel@tonic-gate 10950Sstevel@tonic-gate if (GET_CODE (lhs) == NEG) 10960Sstevel@tonic-gate coeff0 = -1, lhs = XEXP (lhs, 0); 10970Sstevel@tonic-gate else if (GET_CODE (lhs) == MULT 10980Sstevel@tonic-gate && GET_CODE (XEXP (lhs, 1)) == CONST_INT) 10990Sstevel@tonic-gate { 11000Sstevel@tonic-gate coeff0 = INTVAL (XEXP (lhs, 1)), lhs = XEXP (lhs, 0); 11010Sstevel@tonic-gate had_mult = 1; 11020Sstevel@tonic-gate } 11030Sstevel@tonic-gate else if (GET_CODE (lhs) == ASHIFT 11040Sstevel@tonic-gate && GET_CODE (XEXP (lhs, 1)) == CONST_INT 11050Sstevel@tonic-gate && INTVAL (XEXP (lhs, 1)) >= 0 11060Sstevel@tonic-gate && INTVAL (XEXP (lhs, 1)) < HOST_BITS_PER_WIDE_INT) 11070Sstevel@tonic-gate { 11080Sstevel@tonic-gate coeff0 = ((HOST_WIDE_INT) 1) << INTVAL (XEXP (lhs, 1)); 11090Sstevel@tonic-gate lhs = XEXP (lhs, 0); 11100Sstevel@tonic-gate } 11110Sstevel@tonic-gate 11120Sstevel@tonic-gate if (GET_CODE (rhs) == NEG) 11130Sstevel@tonic-gate coeff1 = -1, rhs = XEXP (rhs, 0); 11140Sstevel@tonic-gate else if (GET_CODE (rhs) == MULT 11150Sstevel@tonic-gate && GET_CODE (XEXP (rhs, 1)) == CONST_INT) 11160Sstevel@tonic-gate { 11170Sstevel@tonic-gate coeff1 = INTVAL (XEXP (rhs, 1)), rhs = XEXP (rhs, 0); 11180Sstevel@tonic-gate had_mult = 1; 11190Sstevel@tonic-gate } 11200Sstevel@tonic-gate else if (GET_CODE (rhs) == ASHIFT 11210Sstevel@tonic-gate && GET_CODE (XEXP (rhs, 1)) == CONST_INT 11220Sstevel@tonic-gate && INTVAL (XEXP (rhs, 1)) >= 0 11230Sstevel@tonic-gate && INTVAL (XEXP 
(rhs, 1)) < HOST_BITS_PER_WIDE_INT) 11240Sstevel@tonic-gate { 11250Sstevel@tonic-gate coeff1 = ((HOST_WIDE_INT) 1) << INTVAL (XEXP (rhs, 1)); 11260Sstevel@tonic-gate rhs = XEXP (rhs, 0); 11270Sstevel@tonic-gate } 11280Sstevel@tonic-gate 11290Sstevel@tonic-gate if (rtx_equal_p (lhs, rhs)) 11300Sstevel@tonic-gate { 11310Sstevel@tonic-gate tem = simplify_gen_binary (MULT, mode, lhs, 11320Sstevel@tonic-gate GEN_INT (coeff0 + coeff1)); 11330Sstevel@tonic-gate return (GET_CODE (tem) == MULT && ! had_mult) ? 0 : tem; 11340Sstevel@tonic-gate } 11350Sstevel@tonic-gate } 11360Sstevel@tonic-gate 11370Sstevel@tonic-gate /* If one of the operands is a PLUS or a MINUS, see if we can 11380Sstevel@tonic-gate simplify this by the associative law. 11390Sstevel@tonic-gate Don't use the associative law for floating point. 11400Sstevel@tonic-gate The inaccuracy makes it nonassociative, 11410Sstevel@tonic-gate and subtle programs can break if operations are associated. */ 11420Sstevel@tonic-gate 11430Sstevel@tonic-gate if (INTEGRAL_MODE_P (mode) 11440Sstevel@tonic-gate && (GET_CODE (op0) == PLUS || GET_CODE (op0) == MINUS 11450Sstevel@tonic-gate || GET_CODE (op1) == PLUS || GET_CODE (op1) == MINUS 11460Sstevel@tonic-gate || (GET_CODE (op0) == CONST 11470Sstevel@tonic-gate && GET_CODE (XEXP (op0, 0)) == PLUS) 11480Sstevel@tonic-gate || (GET_CODE (op1) == CONST 11490Sstevel@tonic-gate && GET_CODE (XEXP (op1, 0)) == PLUS)) 11500Sstevel@tonic-gate && (tem = simplify_plus_minus (code, mode, op0, op1)) != 0) 11510Sstevel@tonic-gate return tem; 11520Sstevel@tonic-gate break; 11530Sstevel@tonic-gate 11540Sstevel@tonic-gate case COMPARE: 11550Sstevel@tonic-gate#ifdef HAVE_cc0 11560Sstevel@tonic-gate /* Convert (compare FOO (const_int 0)) to FOO unless we aren't 11570Sstevel@tonic-gate using cc0, in which case we want to leave it as a COMPARE 11580Sstevel@tonic-gate so we can distinguish it from a register-register-copy. 
11590Sstevel@tonic-gate 11600Sstevel@tonic-gate In IEEE floating point, x-0 is not the same as x. */ 11610Sstevel@tonic-gate 11620Sstevel@tonic-gate if ((TARGET_FLOAT_FORMAT != IEEE_FLOAT_FORMAT 11630Sstevel@tonic-gate || ! FLOAT_MODE_P (mode) || flag_unsafe_math_optimizations) 11640Sstevel@tonic-gate && trueop1 == CONST0_RTX (mode)) 11650Sstevel@tonic-gate return op0; 11660Sstevel@tonic-gate#endif 11670Sstevel@tonic-gate 11680Sstevel@tonic-gate /* Convert (compare (gt (flags) 0) (lt (flags) 0)) to (flags). */ 11690Sstevel@tonic-gate if (((GET_CODE (op0) == GT && GET_CODE (op1) == LT) 11700Sstevel@tonic-gate || (GET_CODE (op0) == GTU && GET_CODE (op1) == LTU)) 11710Sstevel@tonic-gate && XEXP (op0, 1) == const0_rtx && XEXP (op1, 1) == const0_rtx) 11720Sstevel@tonic-gate { 11730Sstevel@tonic-gate rtx xop00 = XEXP (op0, 0); 11740Sstevel@tonic-gate rtx xop10 = XEXP (op1, 0); 11750Sstevel@tonic-gate 11760Sstevel@tonic-gate#ifdef HAVE_cc0 11770Sstevel@tonic-gate if (GET_CODE (xop00) == CC0 && GET_CODE (xop10) == CC0) 11780Sstevel@tonic-gate#else 11790Sstevel@tonic-gate if (GET_CODE (xop00) == REG && GET_CODE (xop10) == REG 11800Sstevel@tonic-gate && GET_MODE (xop00) == GET_MODE (xop10) 11810Sstevel@tonic-gate && REGNO (xop00) == REGNO (xop10) 11820Sstevel@tonic-gate && GET_MODE_CLASS (GET_MODE (xop00)) == MODE_CC 11830Sstevel@tonic-gate && GET_MODE_CLASS (GET_MODE (xop10)) == MODE_CC) 11840Sstevel@tonic-gate#endif 11850Sstevel@tonic-gate return xop00; 11860Sstevel@tonic-gate } 11870Sstevel@tonic-gate break; 11880Sstevel@tonic-gate 11890Sstevel@tonic-gate case MINUS: 11900Sstevel@tonic-gate /* None of these optimizations can be done for IEEE 11910Sstevel@tonic-gate floating point. */ 11920Sstevel@tonic-gate if (TARGET_FLOAT_FORMAT == IEEE_FLOAT_FORMAT 11930Sstevel@tonic-gate && FLOAT_MODE_P (mode) && ! 
flag_unsafe_math_optimizations) 11940Sstevel@tonic-gate break; 11950Sstevel@tonic-gate 11960Sstevel@tonic-gate /* We can't assume x-x is 0 even with non-IEEE floating point, 11970Sstevel@tonic-gate but since it is zero except in very strange circumstances, we 11980Sstevel@tonic-gate will treat it as zero with -funsafe-math-optimizations. */ 11990Sstevel@tonic-gate if (rtx_equal_p (trueop0, trueop1) 12000Sstevel@tonic-gate && ! side_effects_p (op0) 12010Sstevel@tonic-gate && (! FLOAT_MODE_P (mode) || flag_unsafe_math_optimizations)) 12020Sstevel@tonic-gate return CONST0_RTX (mode); 12030Sstevel@tonic-gate 12040Sstevel@tonic-gate /* Change subtraction from zero into negation. */ 12050Sstevel@tonic-gate if (trueop0 == CONST0_RTX (mode)) 12060Sstevel@tonic-gate return gen_rtx_NEG (mode, op1); 12070Sstevel@tonic-gate 12080Sstevel@tonic-gate /* (-1 - a) is ~a. */ 12090Sstevel@tonic-gate if (trueop0 == constm1_rtx) 12100Sstevel@tonic-gate return gen_rtx_NOT (mode, op1); 12110Sstevel@tonic-gate 12120Sstevel@tonic-gate /* Subtracting 0 has no effect. */ 12130Sstevel@tonic-gate if (trueop1 == CONST0_RTX (mode)) 12140Sstevel@tonic-gate return op0; 12150Sstevel@tonic-gate 12160Sstevel@tonic-gate /* See if this is something like X * C - X or vice versa or 12170Sstevel@tonic-gate if the multiplication is written as a shift. If so, we can 12180Sstevel@tonic-gate distribute and make a new multiply, shift, or maybe just 12190Sstevel@tonic-gate have X (if C is 2 in the example above). But don't make 12200Sstevel@tonic-gate real multiply if we didn't have one before. */ 12210Sstevel@tonic-gate 12220Sstevel@tonic-gate if (! 
FLOAT_MODE_P (mode)) 12230Sstevel@tonic-gate { 12240Sstevel@tonic-gate HOST_WIDE_INT coeff0 = 1, coeff1 = 1; 12250Sstevel@tonic-gate rtx lhs = op0, rhs = op1; 12260Sstevel@tonic-gate int had_mult = 0; 12270Sstevel@tonic-gate 12280Sstevel@tonic-gate if (GET_CODE (lhs) == NEG) 12290Sstevel@tonic-gate coeff0 = -1, lhs = XEXP (lhs, 0); 12300Sstevel@tonic-gate else if (GET_CODE (lhs) == MULT 12310Sstevel@tonic-gate && GET_CODE (XEXP (lhs, 1)) == CONST_INT) 12320Sstevel@tonic-gate { 12330Sstevel@tonic-gate coeff0 = INTVAL (XEXP (lhs, 1)), lhs = XEXP (lhs, 0); 12340Sstevel@tonic-gate had_mult = 1; 12350Sstevel@tonic-gate } 12360Sstevel@tonic-gate else if (GET_CODE (lhs) == ASHIFT 12370Sstevel@tonic-gate && GET_CODE (XEXP (lhs, 1)) == CONST_INT 12380Sstevel@tonic-gate && INTVAL (XEXP (lhs, 1)) >= 0 12390Sstevel@tonic-gate && INTVAL (XEXP (lhs, 1)) < HOST_BITS_PER_WIDE_INT) 12400Sstevel@tonic-gate { 12410Sstevel@tonic-gate coeff0 = ((HOST_WIDE_INT) 1) << INTVAL (XEXP (lhs, 1)); 12420Sstevel@tonic-gate lhs = XEXP (lhs, 0); 12430Sstevel@tonic-gate } 12440Sstevel@tonic-gate 12450Sstevel@tonic-gate if (GET_CODE (rhs) == NEG) 12460Sstevel@tonic-gate coeff1 = - 1, rhs = XEXP (rhs, 0); 12470Sstevel@tonic-gate else if (GET_CODE (rhs) == MULT 12480Sstevel@tonic-gate && GET_CODE (XEXP (rhs, 1)) == CONST_INT) 12490Sstevel@tonic-gate { 12500Sstevel@tonic-gate coeff1 = INTVAL (XEXP (rhs, 1)), rhs = XEXP (rhs, 0); 12510Sstevel@tonic-gate had_mult = 1; 12520Sstevel@tonic-gate } 12530Sstevel@tonic-gate else if (GET_CODE (rhs) == ASHIFT 12540Sstevel@tonic-gate && GET_CODE (XEXP (rhs, 1)) == CONST_INT 12550Sstevel@tonic-gate && INTVAL (XEXP (rhs, 1)) >= 0 12560Sstevel@tonic-gate && INTVAL (XEXP (rhs, 1)) < HOST_BITS_PER_WIDE_INT) 12570Sstevel@tonic-gate { 12580Sstevel@tonic-gate coeff1 = ((HOST_WIDE_INT) 1) << INTVAL (XEXP (rhs, 1)); 12590Sstevel@tonic-gate rhs = XEXP (rhs, 0); 12600Sstevel@tonic-gate } 12610Sstevel@tonic-gate 12620Sstevel@tonic-gate if (rtx_equal_p (lhs, rhs)) 
12630Sstevel@tonic-gate { 12640Sstevel@tonic-gate tem = simplify_gen_binary (MULT, mode, lhs, 12650Sstevel@tonic-gate GEN_INT (coeff0 - coeff1)); 12660Sstevel@tonic-gate return (GET_CODE (tem) == MULT && ! had_mult) ? 0 : tem; 12670Sstevel@tonic-gate } 12680Sstevel@tonic-gate } 12690Sstevel@tonic-gate 12700Sstevel@tonic-gate /* (a - (-b)) -> (a + b). */ 12710Sstevel@tonic-gate if (GET_CODE (op1) == NEG) 12720Sstevel@tonic-gate return simplify_gen_binary (PLUS, mode, op0, XEXP (op1, 0)); 12730Sstevel@tonic-gate 12740Sstevel@tonic-gate /* If one of the operands is a PLUS or a MINUS, see if we can 12750Sstevel@tonic-gate simplify this by the associative law. 12760Sstevel@tonic-gate Don't use the associative law for floating point. 12770Sstevel@tonic-gate The inaccuracy makes it nonassociative, 12780Sstevel@tonic-gate and subtle programs can break if operations are associated. */ 12790Sstevel@tonic-gate 12800Sstevel@tonic-gate if (INTEGRAL_MODE_P (mode) 12810Sstevel@tonic-gate && (GET_CODE (op0) == PLUS || GET_CODE (op0) == MINUS 12820Sstevel@tonic-gate || GET_CODE (op1) == PLUS || GET_CODE (op1) == MINUS 12830Sstevel@tonic-gate || (GET_CODE (op0) == CONST 12840Sstevel@tonic-gate && GET_CODE (XEXP (op0, 0)) == PLUS) 12850Sstevel@tonic-gate || (GET_CODE (op1) == CONST 12860Sstevel@tonic-gate && GET_CODE (XEXP (op1, 0)) == PLUS)) 12870Sstevel@tonic-gate && (tem = simplify_plus_minus (code, mode, op0, op1)) != 0) 12880Sstevel@tonic-gate return tem; 12890Sstevel@tonic-gate 12900Sstevel@tonic-gate /* Don't let a relocatable value get a negative coeff. 
*/ 12910Sstevel@tonic-gate if (GET_CODE (op1) == CONST_INT && GET_MODE (op0) != VOIDmode) 12920Sstevel@tonic-gate return simplify_gen_binary (PLUS, mode, 12930Sstevel@tonic-gate op0, 12940Sstevel@tonic-gate neg_const_int (mode, op1)); 12950Sstevel@tonic-gate 12960Sstevel@tonic-gate /* (x - (x & y)) -> (x & ~y) */ 12970Sstevel@tonic-gate if (GET_CODE (op1) == AND) 12980Sstevel@tonic-gate { 12990Sstevel@tonic-gate if (rtx_equal_p (op0, XEXP (op1, 0))) 13000Sstevel@tonic-gate return simplify_gen_binary (AND, mode, op0, 13010Sstevel@tonic-gate gen_rtx_NOT (mode, XEXP (op1, 1))); 13020Sstevel@tonic-gate if (rtx_equal_p (op0, XEXP (op1, 1))) 13030Sstevel@tonic-gate return simplify_gen_binary (AND, mode, op0, 13040Sstevel@tonic-gate gen_rtx_NOT (mode, XEXP (op1, 0))); 13050Sstevel@tonic-gate } 13060Sstevel@tonic-gate break; 13070Sstevel@tonic-gate 13080Sstevel@tonic-gate case MULT: 13090Sstevel@tonic-gate if (trueop1 == constm1_rtx) 13100Sstevel@tonic-gate { 13110Sstevel@tonic-gate tem = simplify_unary_operation (NEG, mode, op0, mode); 13120Sstevel@tonic-gate 13130Sstevel@tonic-gate return tem ? tem : gen_rtx_NEG (mode, op0); 13140Sstevel@tonic-gate } 13150Sstevel@tonic-gate 13160Sstevel@tonic-gate /* In IEEE floating point, x*0 is not always 0. */ 13170Sstevel@tonic-gate if ((TARGET_FLOAT_FORMAT != IEEE_FLOAT_FORMAT 13180Sstevel@tonic-gate || ! FLOAT_MODE_P (mode) || flag_unsafe_math_optimizations) 13190Sstevel@tonic-gate && trueop1 == CONST0_RTX (mode) 13200Sstevel@tonic-gate && ! side_effects_p (op0)) 13210Sstevel@tonic-gate return op1; 13220Sstevel@tonic-gate 13230Sstevel@tonic-gate /* In IEEE floating point, x*1 is not equivalent to x for nans. 13240Sstevel@tonic-gate However, ANSI says we can drop signals, 13250Sstevel@tonic-gate so we can do this anyway. 
*/ 13260Sstevel@tonic-gate if (trueop1 == CONST1_RTX (mode)) 13270Sstevel@tonic-gate return op0; 13280Sstevel@tonic-gate 13290Sstevel@tonic-gate /* Convert multiply by constant power of two into shift unless 13300Sstevel@tonic-gate we are still generating RTL. This test is a kludge. */ 13310Sstevel@tonic-gate if (GET_CODE (trueop1) == CONST_INT 13320Sstevel@tonic-gate && (val = exact_log2 (INTVAL (trueop1))) >= 0 13330Sstevel@tonic-gate /* If the mode is larger than the host word size, and the 13340Sstevel@tonic-gate uppermost bit is set, then this isn't a power of two due 13350Sstevel@tonic-gate to implicit sign extension. */ 13360Sstevel@tonic-gate && (width <= HOST_BITS_PER_WIDE_INT 13370Sstevel@tonic-gate || val != HOST_BITS_PER_WIDE_INT - 1) 13380Sstevel@tonic-gate && ! rtx_equal_function_value_matters) 13390Sstevel@tonic-gate return gen_rtx_ASHIFT (mode, op0, GEN_INT (val)); 13400Sstevel@tonic-gate 13410Sstevel@tonic-gate if (GET_CODE (trueop1) == CONST_DOUBLE 13420Sstevel@tonic-gate && GET_MODE_CLASS (GET_MODE (trueop1)) == MODE_FLOAT) 13430Sstevel@tonic-gate { 13440Sstevel@tonic-gate struct simplify_binary_is2orm1_args args; 13450Sstevel@tonic-gate 13460Sstevel@tonic-gate args.value = trueop1; 13470Sstevel@tonic-gate if (! 
do_float_handler (simplify_binary_is2orm1, (PTR) &args)) 13480Sstevel@tonic-gate return 0; 13490Sstevel@tonic-gate 13500Sstevel@tonic-gate /* x*2 is x+x and x*(-1) is -x */ 13510Sstevel@tonic-gate if (args.is_2 && GET_MODE (op0) == mode) 13520Sstevel@tonic-gate return gen_rtx_PLUS (mode, op0, copy_rtx (op0)); 13530Sstevel@tonic-gate 13540Sstevel@tonic-gate else if (args.is_m1 && GET_MODE (op0) == mode) 13550Sstevel@tonic-gate return gen_rtx_NEG (mode, op0); 13560Sstevel@tonic-gate } 13570Sstevel@tonic-gate break; 13580Sstevel@tonic-gate 13590Sstevel@tonic-gate case IOR: 13600Sstevel@tonic-gate if (trueop1 == const0_rtx) 13610Sstevel@tonic-gate return op0; 13620Sstevel@tonic-gate if (GET_CODE (trueop1) == CONST_INT 13630Sstevel@tonic-gate && ((INTVAL (trueop1) & GET_MODE_MASK (mode)) 13640Sstevel@tonic-gate == GET_MODE_MASK (mode))) 13650Sstevel@tonic-gate return op1; 13660Sstevel@tonic-gate if (rtx_equal_p (trueop0, trueop1) && ! side_effects_p (op0)) 13670Sstevel@tonic-gate return op0; 13680Sstevel@tonic-gate /* A | (~A) -> -1 */ 13690Sstevel@tonic-gate if (((GET_CODE (op0) == NOT && rtx_equal_p (XEXP (op0, 0), op1)) 13700Sstevel@tonic-gate || (GET_CODE (op1) == NOT && rtx_equal_p (XEXP (op1, 0), op0))) 13710Sstevel@tonic-gate && ! side_effects_p (op0) 13720Sstevel@tonic-gate && GET_MODE_CLASS (mode) != MODE_CC) 13730Sstevel@tonic-gate return constm1_rtx; 13740Sstevel@tonic-gate break; 13750Sstevel@tonic-gate 13760Sstevel@tonic-gate case XOR: 13770Sstevel@tonic-gate if (trueop1 == const0_rtx) 13780Sstevel@tonic-gate return op0; 13790Sstevel@tonic-gate if (GET_CODE (trueop1) == CONST_INT 13800Sstevel@tonic-gate && ((INTVAL (trueop1) & GET_MODE_MASK (mode)) 13810Sstevel@tonic-gate == GET_MODE_MASK (mode))) 13820Sstevel@tonic-gate return gen_rtx_NOT (mode, op0); 13830Sstevel@tonic-gate if (trueop0 == trueop1 && ! 
side_effects_p (op0) 13840Sstevel@tonic-gate && GET_MODE_CLASS (mode) != MODE_CC) 13850Sstevel@tonic-gate return const0_rtx; 13860Sstevel@tonic-gate break; 13870Sstevel@tonic-gate 13880Sstevel@tonic-gate case AND: 13890Sstevel@tonic-gate if (trueop1 == const0_rtx && ! side_effects_p (op0)) 13900Sstevel@tonic-gate return const0_rtx; 13910Sstevel@tonic-gate if (GET_CODE (trueop1) == CONST_INT 13920Sstevel@tonic-gate && ((INTVAL (trueop1) & GET_MODE_MASK (mode)) 13930Sstevel@tonic-gate == GET_MODE_MASK (mode))) 13940Sstevel@tonic-gate return op0; 13950Sstevel@tonic-gate if (trueop0 == trueop1 && ! side_effects_p (op0) 13960Sstevel@tonic-gate && GET_MODE_CLASS (mode) != MODE_CC) 13970Sstevel@tonic-gate return op0; 13980Sstevel@tonic-gate /* A & (~A) -> 0 */ 13990Sstevel@tonic-gate if (((GET_CODE (op0) == NOT && rtx_equal_p (XEXP (op0, 0), op1)) 14000Sstevel@tonic-gate || (GET_CODE (op1) == NOT && rtx_equal_p (XEXP (op1, 0), op0))) 14010Sstevel@tonic-gate && ! side_effects_p (op0) 14020Sstevel@tonic-gate && GET_MODE_CLASS (mode) != MODE_CC) 14030Sstevel@tonic-gate return const0_rtx; 14040Sstevel@tonic-gate break; 14050Sstevel@tonic-gate 14060Sstevel@tonic-gate case UDIV: 14070Sstevel@tonic-gate /* Convert divide by power of two into shift (divide by 1 handled 14080Sstevel@tonic-gate below). */ 14090Sstevel@tonic-gate if (GET_CODE (trueop1) == CONST_INT 14100Sstevel@tonic-gate && (arg1 = exact_log2 (INTVAL (trueop1))) > 0) 14110Sstevel@tonic-gate return gen_rtx_LSHIFTRT (mode, op0, GEN_INT (arg1)); 14120Sstevel@tonic-gate 14130Sstevel@tonic-gate /* ... fall through ... */ 14140Sstevel@tonic-gate 14150Sstevel@tonic-gate case DIV: 14160Sstevel@tonic-gate if (trueop1 == CONST1_RTX (mode)) 14170Sstevel@tonic-gate { 14180Sstevel@tonic-gate /* On some platforms DIV uses narrower mode than its 14190Sstevel@tonic-gate operands. 
*/ 14200Sstevel@tonic-gate rtx x = gen_lowpart_common (mode, op0); 14210Sstevel@tonic-gate if (x) 14220Sstevel@tonic-gate return x; 14230Sstevel@tonic-gate else if (mode != GET_MODE (op0) && GET_MODE (op0) != VOIDmode) 14240Sstevel@tonic-gate return gen_lowpart_SUBREG (mode, op0); 14250Sstevel@tonic-gate else 14260Sstevel@tonic-gate return op0; 14270Sstevel@tonic-gate } 14280Sstevel@tonic-gate 14290Sstevel@tonic-gate /* In IEEE floating point, 0/x is not always 0. */ 14300Sstevel@tonic-gate if ((TARGET_FLOAT_FORMAT != IEEE_FLOAT_FORMAT 14310Sstevel@tonic-gate || ! FLOAT_MODE_P (mode) || flag_unsafe_math_optimizations) 14320Sstevel@tonic-gate && trueop0 == CONST0_RTX (mode) 14330Sstevel@tonic-gate && ! side_effects_p (op1)) 14340Sstevel@tonic-gate return op0; 14350Sstevel@tonic-gate 14360Sstevel@tonic-gate#if ! defined (REAL_IS_NOT_DOUBLE) || defined (REAL_ARITHMETIC) 14370Sstevel@tonic-gate /* Change division by a constant into multiplication. Only do 14380Sstevel@tonic-gate this with -funsafe-math-optimizations. */ 14390Sstevel@tonic-gate else if (GET_CODE (trueop1) == CONST_DOUBLE 14400Sstevel@tonic-gate && GET_MODE_CLASS (GET_MODE (trueop1)) == MODE_FLOAT 14410Sstevel@tonic-gate && trueop1 != CONST0_RTX (mode) 14420Sstevel@tonic-gate && flag_unsafe_math_optimizations) 14430Sstevel@tonic-gate { 14440Sstevel@tonic-gate REAL_VALUE_TYPE d; 14450Sstevel@tonic-gate REAL_VALUE_FROM_CONST_DOUBLE (d, trueop1); 14460Sstevel@tonic-gate 14470Sstevel@tonic-gate if (! 
REAL_VALUES_EQUAL (d, dconst0)) 14480Sstevel@tonic-gate { 14490Sstevel@tonic-gate#if defined (REAL_ARITHMETIC) 14500Sstevel@tonic-gate REAL_ARITHMETIC (d, rtx_to_tree_code (DIV), dconst1, d); 14510Sstevel@tonic-gate return gen_rtx_MULT (mode, op0, 14520Sstevel@tonic-gate CONST_DOUBLE_FROM_REAL_VALUE (d, mode)); 14530Sstevel@tonic-gate#else 14540Sstevel@tonic-gate return 14550Sstevel@tonic-gate gen_rtx_MULT (mode, op0, 14560Sstevel@tonic-gate CONST_DOUBLE_FROM_REAL_VALUE (1./d, mode)); 14570Sstevel@tonic-gate#endif 14580Sstevel@tonic-gate } 14590Sstevel@tonic-gate } 14600Sstevel@tonic-gate#endif 14610Sstevel@tonic-gate break; 14620Sstevel@tonic-gate 14630Sstevel@tonic-gate case UMOD: 14640Sstevel@tonic-gate /* Handle modulus by power of two (mod with 1 handled below). */ 14650Sstevel@tonic-gate if (GET_CODE (trueop1) == CONST_INT 14660Sstevel@tonic-gate && exact_log2 (INTVAL (trueop1)) > 0) 14670Sstevel@tonic-gate return gen_rtx_AND (mode, op0, GEN_INT (INTVAL (op1) - 1)); 14680Sstevel@tonic-gate 14690Sstevel@tonic-gate /* ... fall through ... */ 14700Sstevel@tonic-gate 14710Sstevel@tonic-gate case MOD: 14720Sstevel@tonic-gate if ((trueop0 == const0_rtx || trueop1 == const1_rtx) 14730Sstevel@tonic-gate && ! side_effects_p (op0) && ! side_effects_p (op1)) 14740Sstevel@tonic-gate return const0_rtx; 14750Sstevel@tonic-gate break; 14760Sstevel@tonic-gate 14770Sstevel@tonic-gate case ROTATERT: 14780Sstevel@tonic-gate case ROTATE: 14790Sstevel@tonic-gate /* Rotating ~0 always results in ~0. */ 14800Sstevel@tonic-gate if (GET_CODE (trueop0) == CONST_INT && width <= HOST_BITS_PER_WIDE_INT 14810Sstevel@tonic-gate && (unsigned HOST_WIDE_INT) INTVAL (trueop0) == GET_MODE_MASK (mode) 14820Sstevel@tonic-gate && ! side_effects_p (op1)) 14830Sstevel@tonic-gate return op0; 14840Sstevel@tonic-gate 14850Sstevel@tonic-gate /* ... fall through ... 
*/ 14860Sstevel@tonic-gate 14870Sstevel@tonic-gate case ASHIFT: 14880Sstevel@tonic-gate case ASHIFTRT: 14890Sstevel@tonic-gate case LSHIFTRT: 14900Sstevel@tonic-gate if (trueop1 == const0_rtx) 14910Sstevel@tonic-gate return op0; 14920Sstevel@tonic-gate if (trueop0 == const0_rtx && ! side_effects_p (op1)) 14930Sstevel@tonic-gate return op0; 14940Sstevel@tonic-gate break; 14950Sstevel@tonic-gate 14960Sstevel@tonic-gate case SMIN: 14970Sstevel@tonic-gate if (width <= HOST_BITS_PER_WIDE_INT && GET_CODE (trueop1) == CONST_INT 14980Sstevel@tonic-gate && INTVAL (trueop1) == (HOST_WIDE_INT) 1 << (width -1) 14990Sstevel@tonic-gate && ! side_effects_p (op0)) 15000Sstevel@tonic-gate return op1; 15010Sstevel@tonic-gate else if (rtx_equal_p (trueop0, trueop1) && ! side_effects_p (op0)) 15020Sstevel@tonic-gate return op0; 15030Sstevel@tonic-gate break; 15040Sstevel@tonic-gate 15050Sstevel@tonic-gate case SMAX: 15060Sstevel@tonic-gate if (width <= HOST_BITS_PER_WIDE_INT && GET_CODE (trueop1) == CONST_INT 15070Sstevel@tonic-gate && ((unsigned HOST_WIDE_INT) INTVAL (trueop1) 15080Sstevel@tonic-gate == (unsigned HOST_WIDE_INT) GET_MODE_MASK (mode) >> 1) 15090Sstevel@tonic-gate && ! side_effects_p (op0)) 15100Sstevel@tonic-gate return op1; 15110Sstevel@tonic-gate else if (rtx_equal_p (trueop0, trueop1) && ! side_effects_p (op0)) 15120Sstevel@tonic-gate return op0; 15130Sstevel@tonic-gate break; 15140Sstevel@tonic-gate 15150Sstevel@tonic-gate case UMIN: 15160Sstevel@tonic-gate if (trueop1 == const0_rtx && ! side_effects_p (op0)) 15170Sstevel@tonic-gate return op1; 15180Sstevel@tonic-gate else if (rtx_equal_p (trueop0, trueop1) && ! side_effects_p (op0)) 15190Sstevel@tonic-gate return op0; 15200Sstevel@tonic-gate break; 15210Sstevel@tonic-gate 15220Sstevel@tonic-gate case UMAX: 15230Sstevel@tonic-gate if (trueop1 == constm1_rtx && ! side_effects_p (op0)) 15240Sstevel@tonic-gate return op1; 15250Sstevel@tonic-gate else if (rtx_equal_p (trueop0, trueop1) && ! 
side_effects_p (op0)) 15260Sstevel@tonic-gate return op0; 15270Sstevel@tonic-gate break; 15280Sstevel@tonic-gate 15290Sstevel@tonic-gate case SS_PLUS: 15300Sstevel@tonic-gate case US_PLUS: 15310Sstevel@tonic-gate case SS_MINUS: 15320Sstevel@tonic-gate case US_MINUS: 15330Sstevel@tonic-gate /* ??? There are simplifications that can be done. */ 15340Sstevel@tonic-gate return 0; 15350Sstevel@tonic-gate 15360Sstevel@tonic-gate default: 15370Sstevel@tonic-gate abort (); 15380Sstevel@tonic-gate } 15390Sstevel@tonic-gate 15400Sstevel@tonic-gate return 0; 15410Sstevel@tonic-gate } 15420Sstevel@tonic-gate 15430Sstevel@tonic-gate /* Get the integer argument values in two forms: 15440Sstevel@tonic-gate zero-extended in ARG0, ARG1 and sign-extended in ARG0S, ARG1S. */ 15450Sstevel@tonic-gate 15460Sstevel@tonic-gate arg0 = INTVAL (trueop0); 15470Sstevel@tonic-gate arg1 = INTVAL (trueop1); 15480Sstevel@tonic-gate 15490Sstevel@tonic-gate if (width < HOST_BITS_PER_WIDE_INT) 15500Sstevel@tonic-gate { 15510Sstevel@tonic-gate arg0 &= ((HOST_WIDE_INT) 1 << width) - 1; 15520Sstevel@tonic-gate arg1 &= ((HOST_WIDE_INT) 1 << width) - 1; 15530Sstevel@tonic-gate 15540Sstevel@tonic-gate arg0s = arg0; 15550Sstevel@tonic-gate if (arg0s & ((HOST_WIDE_INT) 1 << (width - 1))) 15560Sstevel@tonic-gate arg0s |= ((HOST_WIDE_INT) (-1) << width); 15570Sstevel@tonic-gate 15580Sstevel@tonic-gate arg1s = arg1; 15590Sstevel@tonic-gate if (arg1s & ((HOST_WIDE_INT) 1 << (width - 1))) 15600Sstevel@tonic-gate arg1s |= ((HOST_WIDE_INT) (-1) << width); 15610Sstevel@tonic-gate } 15620Sstevel@tonic-gate else 15630Sstevel@tonic-gate { 15640Sstevel@tonic-gate arg0s = arg0; 15650Sstevel@tonic-gate arg1s = arg1; 15660Sstevel@tonic-gate } 15670Sstevel@tonic-gate 15680Sstevel@tonic-gate /* Compute the value of the arithmetic. 
*/ 15690Sstevel@tonic-gate 15700Sstevel@tonic-gate switch (code) 15710Sstevel@tonic-gate { 15720Sstevel@tonic-gate case PLUS: 15730Sstevel@tonic-gate val = arg0s + arg1s; 15740Sstevel@tonic-gate break; 15750Sstevel@tonic-gate 15760Sstevel@tonic-gate case MINUS: 15770Sstevel@tonic-gate val = arg0s - arg1s; 15780Sstevel@tonic-gate break; 15790Sstevel@tonic-gate 15800Sstevel@tonic-gate case MULT: 15810Sstevel@tonic-gate val = arg0s * arg1s; 15820Sstevel@tonic-gate break; 15830Sstevel@tonic-gate 15840Sstevel@tonic-gate case DIV: 15850Sstevel@tonic-gate if (arg1s == 0 15860Sstevel@tonic-gate || (arg0s == (HOST_WIDE_INT) 1 << (HOST_BITS_PER_WIDE_INT - 1) 15870Sstevel@tonic-gate && arg1s == -1)) 15880Sstevel@tonic-gate return 0; 15890Sstevel@tonic-gate val = arg0s / arg1s; 15900Sstevel@tonic-gate break; 15910Sstevel@tonic-gate 15920Sstevel@tonic-gate case MOD: 15930Sstevel@tonic-gate if (arg1s == 0 15940Sstevel@tonic-gate || (arg0s == (HOST_WIDE_INT) 1 << (HOST_BITS_PER_WIDE_INT - 1) 15950Sstevel@tonic-gate && arg1s == -1)) 15960Sstevel@tonic-gate return 0; 15970Sstevel@tonic-gate val = arg0s % arg1s; 15980Sstevel@tonic-gate break; 15990Sstevel@tonic-gate 16000Sstevel@tonic-gate case UDIV: 16010Sstevel@tonic-gate if (arg1 == 0 16020Sstevel@tonic-gate || (arg0s == (HOST_WIDE_INT) 1 << (HOST_BITS_PER_WIDE_INT - 1) 16030Sstevel@tonic-gate && arg1s == -1)) 16040Sstevel@tonic-gate return 0; 16050Sstevel@tonic-gate val = (unsigned HOST_WIDE_INT) arg0 / arg1; 16060Sstevel@tonic-gate break; 16070Sstevel@tonic-gate 16080Sstevel@tonic-gate case UMOD: 16090Sstevel@tonic-gate if (arg1 == 0 16100Sstevel@tonic-gate || (arg0s == (HOST_WIDE_INT) 1 << (HOST_BITS_PER_WIDE_INT - 1) 16110Sstevel@tonic-gate && arg1s == -1)) 16120Sstevel@tonic-gate return 0; 16130Sstevel@tonic-gate val = (unsigned HOST_WIDE_INT) arg0 % arg1; 16140Sstevel@tonic-gate break; 16150Sstevel@tonic-gate 16160Sstevel@tonic-gate case AND: 16170Sstevel@tonic-gate val = arg0 & arg1; 16180Sstevel@tonic-gate break; 
16190Sstevel@tonic-gate 16200Sstevel@tonic-gate case IOR: 16210Sstevel@tonic-gate val = arg0 | arg1; 16220Sstevel@tonic-gate break; 16230Sstevel@tonic-gate 16240Sstevel@tonic-gate case XOR: 16250Sstevel@tonic-gate val = arg0 ^ arg1; 16260Sstevel@tonic-gate break; 16270Sstevel@tonic-gate 16280Sstevel@tonic-gate case LSHIFTRT: 16290Sstevel@tonic-gate /* If shift count is undefined, don't fold it; let the machine do 16300Sstevel@tonic-gate what it wants. But truncate it if the machine will do that. */ 16310Sstevel@tonic-gate if (arg1 < 0) 16320Sstevel@tonic-gate return 0; 16330Sstevel@tonic-gate 16340Sstevel@tonic-gate#ifdef SHIFT_COUNT_TRUNCATED 16350Sstevel@tonic-gate if (SHIFT_COUNT_TRUNCATED) 16360Sstevel@tonic-gate arg1 %= width; 16370Sstevel@tonic-gate#endif 16380Sstevel@tonic-gate 16390Sstevel@tonic-gate val = ((unsigned HOST_WIDE_INT) arg0) >> arg1; 16400Sstevel@tonic-gate break; 16410Sstevel@tonic-gate 16420Sstevel@tonic-gate case ASHIFT: 16430Sstevel@tonic-gate if (arg1 < 0) 16440Sstevel@tonic-gate return 0; 16450Sstevel@tonic-gate 16460Sstevel@tonic-gate#ifdef SHIFT_COUNT_TRUNCATED 16470Sstevel@tonic-gate if (SHIFT_COUNT_TRUNCATED) 16480Sstevel@tonic-gate arg1 %= width; 16490Sstevel@tonic-gate#endif 16500Sstevel@tonic-gate 16510Sstevel@tonic-gate val = ((unsigned HOST_WIDE_INT) arg0) << arg1; 16520Sstevel@tonic-gate break; 16530Sstevel@tonic-gate 16540Sstevel@tonic-gate case ASHIFTRT: 16550Sstevel@tonic-gate if (arg1 < 0) 16560Sstevel@tonic-gate return 0; 16570Sstevel@tonic-gate 16580Sstevel@tonic-gate#ifdef SHIFT_COUNT_TRUNCATED 16590Sstevel@tonic-gate if (SHIFT_COUNT_TRUNCATED) 16600Sstevel@tonic-gate arg1 %= width; 16610Sstevel@tonic-gate#endif 16620Sstevel@tonic-gate 16630Sstevel@tonic-gate val = arg0s >> arg1; 16640Sstevel@tonic-gate 16650Sstevel@tonic-gate /* Bootstrap compiler may not have sign extended the right shift. 16660Sstevel@tonic-gate Manually extend the sign to insure bootstrap cc matches gcc. 
*/ 16670Sstevel@tonic-gate if (arg0s < 0 && arg1 > 0) 16680Sstevel@tonic-gate val |= ((HOST_WIDE_INT) -1) << (HOST_BITS_PER_WIDE_INT - arg1); 16690Sstevel@tonic-gate 16700Sstevel@tonic-gate break; 16710Sstevel@tonic-gate 16720Sstevel@tonic-gate case ROTATERT: 16730Sstevel@tonic-gate if (arg1 < 0) 16740Sstevel@tonic-gate return 0; 16750Sstevel@tonic-gate 1676 arg1 %= width; 1677 val = ((((unsigned HOST_WIDE_INT) arg0) << (width - arg1)) 1678 | (((unsigned HOST_WIDE_INT) arg0) >> arg1)); 1679 break; 1680 1681 case ROTATE: 1682 if (arg1 < 0) 1683 return 0; 1684 1685 arg1 %= width; 1686 val = ((((unsigned HOST_WIDE_INT) arg0) << arg1) 1687 | (((unsigned HOST_WIDE_INT) arg0) >> (width - arg1))); 1688 break; 1689 1690 case COMPARE: 1691 /* Do nothing here. */ 1692 return 0; 1693 1694 case SMIN: 1695 val = arg0s <= arg1s ? arg0s : arg1s; 1696 break; 1697 1698 case UMIN: 1699 val = ((unsigned HOST_WIDE_INT) arg0 1700 <= (unsigned HOST_WIDE_INT) arg1 ? arg0 : arg1); 1701 break; 1702 1703 case SMAX: 1704 val = arg0s > arg1s ? arg0s : arg1s; 1705 break; 1706 1707 case UMAX: 1708 val = ((unsigned HOST_WIDE_INT) arg0 1709 > (unsigned HOST_WIDE_INT) arg1 ? arg0 : arg1); 1710 break; 1711 1712 default: 1713 abort (); 1714 } 1715 1716 val = trunc_int_for_mode (val, mode); 1717 1718 return GEN_INT (val); 1719} 1720 1721/* Simplify a PLUS or MINUS, at least one of whose operands may be another 1722 PLUS or MINUS. 1723 1724 Rather than test for specific case, we do this by a brute-force method 1725 and do all possible simplifications until no more changes occur. Then 1726 we rebuild the operation. 
*/ 1727 1728struct simplify_plus_minus_op_data 1729{ 1730 rtx op; 1731 int neg; 1732}; 1733 1734static int 1735simplify_plus_minus_op_data_cmp (p1, p2) 1736 const void *p1; 1737 const void *p2; 1738{ 1739 const struct simplify_plus_minus_op_data *d1 = p1; 1740 const struct simplify_plus_minus_op_data *d2 = p2; 1741 1742 return (commutative_operand_precedence (d2->op) 1743 - commutative_operand_precedence (d1->op)); 1744} 1745 1746static rtx 1747simplify_plus_minus (code, mode, op0, op1) 1748 enum rtx_code code; 1749 enum machine_mode mode; 1750 rtx op0, op1; 1751{ 1752 struct simplify_plus_minus_op_data ops[8]; 1753 rtx result, tem; 1754 int n_ops = 2, input_ops = 2, input_consts = 0, n_consts; 1755 int first, negate, changed; 1756 int i, j; 1757 1758 memset ((char *) ops, 0, sizeof ops); 1759 1760 /* Set up the two operands and then expand them until nothing has been 1761 changed. If we run out of room in our array, give up; this should 1762 almost never happen. */ 1763 1764 ops[0].op = op0; 1765 ops[0].neg = 0; 1766 ops[1].op = op1; 1767 ops[1].neg = (code == MINUS); 1768 1769 do 1770 { 1771 changed = 0; 1772 1773 for (i = 0; i < n_ops; i++) 1774 { 1775 rtx this_op = ops[i].op; 1776 int this_neg = ops[i].neg; 1777 enum rtx_code this_code = GET_CODE (this_op); 1778 1779 switch (this_code) 1780 { 1781 case PLUS: 1782 case MINUS: 1783 if (n_ops == 7) 1784 return 0; 1785 1786 ops[n_ops].op = XEXP (this_op, 1); 1787 ops[n_ops].neg = (this_code == MINUS) ^ this_neg; 1788 n_ops++; 1789 1790 ops[i].op = XEXP (this_op, 0); 1791 input_ops++; 1792 changed = 1; 1793 break; 1794 1795 case NEG: 1796 ops[i].op = XEXP (this_op, 0); 1797 ops[i].neg = ! 
this_neg; 1798 changed = 1; 1799 break; 1800 1801 case CONST: 1802 ops[i].op = XEXP (this_op, 0); 1803 input_consts++; 1804 changed = 1; 1805 break; 1806 1807 case NOT: 1808 /* ~a -> (-a - 1) */ 1809 if (n_ops != 7) 1810 { 1811 ops[n_ops].op = constm1_rtx; 1812 ops[n_ops++].neg = this_neg; 1813 ops[i].op = XEXP (this_op, 0); 1814 ops[i].neg = !this_neg; 1815 changed = 1; 1816 } 1817 break; 1818 1819 case CONST_INT: 1820 if (this_neg) 1821 { 1822 ops[i].op = neg_const_int (mode, this_op); 1823 ops[i].neg = 0; 1824 changed = 1; 1825 } 1826 break; 1827 1828 default: 1829 break; 1830 } 1831 } 1832 } 1833 while (changed); 1834 1835 /* If we only have two operands, we can't do anything. */ 1836 if (n_ops <= 2) 1837 return NULL_RTX; 1838 1839 /* Now simplify each pair of operands until nothing changes. The first 1840 time through just simplify constants against each other. */ 1841 1842 first = 1; 1843 do 1844 { 1845 changed = first; 1846 1847 for (i = 0; i < n_ops - 1; i++) 1848 for (j = i + 1; j < n_ops; j++) 1849 { 1850 rtx lhs = ops[i].op, rhs = ops[j].op; 1851 int lneg = ops[i].neg, rneg = ops[j].neg; 1852 1853 if (lhs != 0 && rhs != 0 1854 && (! first || (CONSTANT_P (lhs) && CONSTANT_P (rhs)))) 1855 { 1856 enum rtx_code ncode = PLUS; 1857 1858 if (lneg != rneg) 1859 { 1860 ncode = MINUS; 1861 if (lneg) 1862 tem = lhs, lhs = rhs, rhs = tem; 1863 } 1864 else if (swap_commutative_operands_p (lhs, rhs)) 1865 tem = lhs, lhs = rhs, rhs = tem; 1866 1867 tem = simplify_binary_operation (ncode, mode, lhs, rhs); 1868 1869 /* Reject "simplifications" that just wrap the two 1870 arguments in a CONST. Failure to do so can result 1871 in infinite recursion with simplify_binary_operation 1872 when it calls us to simplify CONST operations. */ 1873 if (tem 1874 && ! (GET_CODE (tem) == CONST 1875 && GET_CODE (XEXP (tem, 0)) == ncode 1876 && XEXP (XEXP (tem, 0), 0) == lhs 1877 && XEXP (XEXP (tem, 0), 1) == rhs) 1878 /* Don't allow -x + -1 -> ~x simplifications in the 1879 first pass. 
This allows us the chance to combine 1880 the -1 with other constants. */ 1881 && ! (first 1882 && GET_CODE (tem) == NOT 1883 && XEXP (tem, 0) == rhs)) 1884 { 1885 lneg &= rneg; 1886 if (GET_CODE (tem) == NEG) 1887 tem = XEXP (tem, 0), lneg = !lneg; 1888 if (GET_CODE (tem) == CONST_INT && lneg) 1889 tem = neg_const_int (mode, tem), lneg = 0; 1890 1891 ops[i].op = tem; 1892 ops[i].neg = lneg; 1893 ops[j].op = NULL_RTX; 1894 changed = 1; 1895 } 1896 } 1897 } 1898 1899 first = 0; 1900 } 1901 while (changed); 1902 1903 /* Pack all the operands to the lower-numbered entries. */ 1904 for (i = 0, j = 0; j < n_ops; j++) 1905 if (ops[j].op) 1906 ops[i++] = ops[j]; 1907 n_ops = i; 1908 1909 /* Sort the operations based on swap_commutative_operands_p. */ 1910 qsort (ops, n_ops, sizeof (*ops), simplify_plus_minus_op_data_cmp); 1911 1912 /* We suppressed creation of trivial CONST expressions in the 1913 combination loop to avoid recursion. Create one manually now. 1914 The combination loop should have ensured that there is exactly 1915 one CONST_INT, and the sort will have ensured that it is last 1916 in the array and that any other constant will be next-to-last. */ 1917 1918 if (n_ops > 1 1919 && GET_CODE (ops[n_ops - 1].op) == CONST_INT 1920 && CONSTANT_P (ops[n_ops - 2].op)) 1921 { 1922 rtx value = ops[n_ops - 1].op; 1923 if (ops[n_ops - 1].neg ^ ops[n_ops - 2].neg) 1924 value = neg_const_int (mode, value); 1925 ops[n_ops - 2].op = plus_constant (ops[n_ops - 2].op, INTVAL (value)); 1926 n_ops--; 1927 } 1928 1929 /* Count the number of CONSTs that we generated. */ 1930 n_consts = 0; 1931 for (i = 0; i < n_ops; i++) 1932 if (GET_CODE (ops[i].op) == CONST) 1933 n_consts++; 1934 1935 /* Give up if we didn't reduce the number of operands we had. Make 1936 sure we count a CONST as two operands. If we have the same 1937 number of operands, but have made more CONSTs than before, this 1938 is also an improvement, so accept it. 
*/ 1939 if (n_ops + n_consts > input_ops 1940 || (n_ops + n_consts == input_ops && n_consts <= input_consts)) 1941 return NULL_RTX; 1942 1943 /* Put a non-negated operand first. If there aren't any, make all 1944 operands positive and negate the whole thing later. */ 1945 1946 negate = 0; 1947 for (i = 0; i < n_ops && ops[i].neg; i++) 1948 continue; 1949 if (i == n_ops) 1950 { 1951 for (i = 0; i < n_ops; i++) 1952 ops[i].neg = 0; 1953 negate = 1; 1954 } 1955 else if (i != 0) 1956 { 1957 tem = ops[0].op; 1958 ops[0] = ops[i]; 1959 ops[i].op = tem; 1960 ops[i].neg = 1; 1961 } 1962 1963 /* Now make the result by performing the requested operations. */ 1964 result = ops[0].op; 1965 for (i = 1; i < n_ops; i++) 1966 result = gen_rtx_fmt_ee (ops[i].neg ? MINUS : PLUS, 1967 mode, result, ops[i].op); 1968 1969 return negate ? gen_rtx_NEG (mode, result) : result; 1970} 1971 1972struct cfc_args 1973{ 1974 rtx op0, op1; /* Input */ 1975 int equal, op0lt, op1lt; /* Output */ 1976 int unordered; 1977}; 1978 1979static void 1980check_fold_consts (data) 1981 PTR data; 1982{ 1983 struct cfc_args *args = (struct cfc_args *) data; 1984 REAL_VALUE_TYPE d0, d1; 1985 1986 /* We may possibly raise an exception while reading the value. */ 1987 args->unordered = 1; 1988 REAL_VALUE_FROM_CONST_DOUBLE (d0, args->op0); 1989 REAL_VALUE_FROM_CONST_DOUBLE (d1, args->op1); 1990 1991 /* Comparisons of Inf versus Inf are ordered. */ 1992 if (REAL_VALUE_ISNAN (d0) 1993 || REAL_VALUE_ISNAN (d1)) 1994 return; 1995 args->equal = REAL_VALUES_EQUAL (d0, d1); 1996 args->op0lt = REAL_VALUES_LESS (d0, d1); 1997 args->op1lt = REAL_VALUES_LESS (d1, d0); 1998 args->unordered = 0; 1999} 2000 2001/* Like simplify_binary_operation except used for relational operators. 2002 MODE is the mode of the operands, not that of the result. If MODE 2003 is VOIDmode, both operands must also be VOIDmode and we compare the 2004 operands in "infinite precision". 
2005 2006 If no simplification is possible, this function returns zero. Otherwise, 2007 it returns either const_true_rtx or const0_rtx. */ 2008 2009rtx 2010simplify_relational_operation (code, mode, op0, op1) 2011 enum rtx_code code; 2012 enum machine_mode mode; 2013 rtx op0, op1; 2014{ 2015 int equal, op0lt, op0ltu, op1lt, op1ltu; 2016 rtx tem; 2017 rtx trueop0; 2018 rtx trueop1; 2019 2020 if (mode == VOIDmode 2021 && (GET_MODE (op0) != VOIDmode 2022 || GET_MODE (op1) != VOIDmode)) 2023 abort (); 2024 2025 /* If op0 is a compare, extract the comparison arguments from it. */ 2026 if (GET_CODE (op0) == COMPARE && op1 == const0_rtx) 2027 op1 = XEXP (op0, 1), op0 = XEXP (op0, 0); 2028 2029 trueop0 = avoid_constant_pool_reference (op0); 2030 trueop1 = avoid_constant_pool_reference (op1); 2031 2032 /* We can't simplify MODE_CC values since we don't know what the 2033 actual comparison is. */ 2034 if (GET_MODE_CLASS (GET_MODE (op0)) == MODE_CC 2035#ifdef HAVE_cc0 2036 || op0 == cc0_rtx 2037#endif 2038 ) 2039 return 0; 2040 2041 /* Make sure the constant is second. */ 2042 if (swap_commutative_operands_p (trueop0, trueop1)) 2043 { 2044 tem = op0, op0 = op1, op1 = tem; 2045 tem = trueop0, trueop0 = trueop1, trueop1 = tem; 2046 code = swap_condition (code); 2047 } 2048 2049 /* For integer comparisons of A and B maybe we can simplify A - B and can 2050 then simplify a comparison of that with zero. If A and B are both either 2051 a register or a CONST_INT, this can't help; testing for these cases will 2052 prevent infinite recursion here and speed things up. 2053 2054 If CODE is an unsigned comparison, then we can never do this optimization, 2055 because it gives an incorrect result if the subtraction wraps around zero. 2056 ANSI C defines unsigned operations such that they never overflow, and 2057 thus such cases can not be ignored. */ 2058 2059 if (INTEGRAL_MODE_P (mode) && trueop1 != const0_rtx 2060 && ! 
((GET_CODE (op0) == REG || GET_CODE (trueop0) == CONST_INT) 2061 && (GET_CODE (op1) == REG || GET_CODE (trueop1) == CONST_INT)) 2062 && 0 != (tem = simplify_binary_operation (MINUS, mode, op0, op1)) 2063 && code != GTU && code != GEU && code != LTU && code != LEU) 2064 return simplify_relational_operation (signed_condition (code), 2065 mode, tem, const0_rtx); 2066 2067 if (flag_unsafe_math_optimizations && code == ORDERED) 2068 return const_true_rtx; 2069 2070 if (flag_unsafe_math_optimizations && code == UNORDERED) 2071 return const0_rtx; 2072 2073 /* For non-IEEE floating-point, if the two operands are equal, we know the 2074 result. */ 2075 if (rtx_equal_p (trueop0, trueop1) 2076 && (TARGET_FLOAT_FORMAT != IEEE_FLOAT_FORMAT 2077 || ! FLOAT_MODE_P (GET_MODE (trueop0)) 2078 || flag_unsafe_math_optimizations)) 2079 equal = 1, op0lt = 0, op0ltu = 0, op1lt = 0, op1ltu = 0; 2080 2081 /* If the operands are floating-point constants, see if we can fold 2082 the result. */ 2083#if ! defined (REAL_IS_NOT_DOUBLE) || defined (REAL_ARITHMETIC) 2084 else if (GET_CODE (trueop0) == CONST_DOUBLE 2085 && GET_CODE (trueop1) == CONST_DOUBLE 2086 && GET_MODE_CLASS (GET_MODE (trueop0)) == MODE_FLOAT) 2087 { 2088 struct cfc_args args; 2089 2090 /* Setup input for check_fold_consts() */ 2091 args.op0 = trueop0; 2092 args.op1 = trueop1; 2093 2094 2095 if (!do_float_handler (check_fold_consts, (PTR) &args)) 2096 args.unordered = 1; 2097 2098 if (args.unordered) 2099 switch (code) 2100 { 2101 case UNEQ: 2102 case UNLT: 2103 case UNGT: 2104 case UNLE: 2105 case UNGE: 2106 case NE: 2107 case UNORDERED: 2108 return const_true_rtx; 2109 case EQ: 2110 case LT: 2111 case GT: 2112 case LE: 2113 case GE: 2114 case LTGT: 2115 case ORDERED: 2116 return const0_rtx; 2117 default: 2118 return 0; 2119 } 2120 2121 /* Receive output from check_fold_consts() */ 2122 equal = args.equal; 2123 op0lt = op0ltu = args.op0lt; 2124 op1lt = op1ltu = args.op1lt; 2125 } 2126#endif /* not REAL_IS_NOT_DOUBLE, or 
REAL_ARITHMETIC */ 2127 2128 /* Otherwise, see if the operands are both integers. */ 2129 else if ((GET_MODE_CLASS (mode) == MODE_INT || mode == VOIDmode) 2130 && (GET_CODE (trueop0) == CONST_DOUBLE 2131 || GET_CODE (trueop0) == CONST_INT) 2132 && (GET_CODE (trueop1) == CONST_DOUBLE 2133 || GET_CODE (trueop1) == CONST_INT)) 2134 { 2135 int width = GET_MODE_BITSIZE (mode); 2136 HOST_WIDE_INT l0s, h0s, l1s, h1s; 2137 unsigned HOST_WIDE_INT l0u, h0u, l1u, h1u; 2138 2139 /* Get the two words comprising each integer constant. */ 2140 if (GET_CODE (trueop0) == CONST_DOUBLE) 2141 { 2142 l0u = l0s = CONST_DOUBLE_LOW (trueop0); 2143 h0u = h0s = CONST_DOUBLE_HIGH (trueop0); 2144 } 2145 else 2146 { 2147 l0u = l0s = INTVAL (trueop0); 2148 h0u = h0s = HWI_SIGN_EXTEND (l0s); 2149 } 2150 2151 if (GET_CODE (trueop1) == CONST_DOUBLE) 2152 { 2153 l1u = l1s = CONST_DOUBLE_LOW (trueop1); 2154 h1u = h1s = CONST_DOUBLE_HIGH (trueop1); 2155 } 2156 else 2157 { 2158 l1u = l1s = INTVAL (trueop1); 2159 h1u = h1s = HWI_SIGN_EXTEND (l1s); 2160 } 2161 2162 /* If WIDTH is nonzero and smaller than HOST_BITS_PER_WIDE_INT, 2163 we have to sign or zero-extend the values. 
*/ 2164 if (width != 0 && width < HOST_BITS_PER_WIDE_INT) 2165 { 2166 l0u &= ((HOST_WIDE_INT) 1 << width) - 1; 2167 l1u &= ((HOST_WIDE_INT) 1 << width) - 1; 2168 2169 if (l0s & ((HOST_WIDE_INT) 1 << (width - 1))) 2170 l0s |= ((HOST_WIDE_INT) (-1) << width); 2171 2172 if (l1s & ((HOST_WIDE_INT) 1 << (width - 1))) 2173 l1s |= ((HOST_WIDE_INT) (-1) << width); 2174 } 2175 if (width != 0 && width <= HOST_BITS_PER_WIDE_INT) 2176 h0u = h1u = 0, h0s = HWI_SIGN_EXTEND (l0s), h1s = HWI_SIGN_EXTEND (l1s); 2177 2178 equal = (h0u == h1u && l0u == l1u); 2179 op0lt = (h0s < h1s || (h0s == h1s && l0u < l1u)); 2180 op1lt = (h1s < h0s || (h1s == h0s && l1u < l0u)); 2181 op0ltu = (h0u < h1u || (h0u == h1u && l0u < l1u)); 2182 op1ltu = (h1u < h0u || (h1u == h0u && l1u < l0u)); 2183 } 2184 2185 /* Otherwise, there are some code-specific tests we can make. */ 2186 else 2187 { 2188 switch (code) 2189 { 2190 case EQ: 2191 /* References to the frame plus a constant or labels cannot 2192 be zero, but a SYMBOL_REF can due to #pragma weak. */ 2193 if (((NONZERO_BASE_PLUS_P (op0) && trueop1 == const0_rtx) 2194 || GET_CODE (trueop0) == LABEL_REF) 2195#if FRAME_POINTER_REGNUM != ARG_POINTER_REGNUM 2196 /* On some machines, the ap reg can be 0 sometimes. */ 2197 && op0 != arg_pointer_rtx 2198#endif 2199 ) 2200 return const0_rtx; 2201 break; 2202 2203 case NE: 2204 if (((NONZERO_BASE_PLUS_P (op0) && trueop1 == const0_rtx) 2205 || GET_CODE (trueop0) == LABEL_REF) 2206#if FRAME_POINTER_REGNUM != ARG_POINTER_REGNUM 2207 && op0 != arg_pointer_rtx 2208#endif 2209 ) 2210 return const_true_rtx; 2211 break; 2212 2213 case GEU: 2214 /* Unsigned values are never negative. */ 2215 if (trueop1 == const0_rtx) 2216 return const_true_rtx; 2217 break; 2218 2219 case LTU: 2220 if (trueop1 == const0_rtx) 2221 return const0_rtx; 2222 break; 2223 2224 case LEU: 2225 /* Unsigned values are never greater than the largest 2226 unsigned value. 
*/ 2227 if (GET_CODE (trueop1) == CONST_INT 2228 && (unsigned HOST_WIDE_INT) INTVAL (trueop1) == GET_MODE_MASK (mode) 2229 && INTEGRAL_MODE_P (mode)) 2230 return const_true_rtx; 2231 break; 2232 2233 case GTU: 2234 if (GET_CODE (trueop1) == CONST_INT 2235 && (unsigned HOST_WIDE_INT) INTVAL (trueop1) == GET_MODE_MASK (mode) 2236 && INTEGRAL_MODE_P (mode)) 2237 return const0_rtx; 2238 break; 2239 2240 default: 2241 break; 2242 } 2243 2244 return 0; 2245 } 2246 2247 /* If we reach here, EQUAL, OP0LT, OP0LTU, OP1LT, and OP1LTU are set 2248 as appropriate. */ 2249 switch (code) 2250 { 2251 case EQ: 2252 case UNEQ: 2253 return equal ? const_true_rtx : const0_rtx; 2254 case NE: 2255 case LTGT: 2256 return ! equal ? const_true_rtx : const0_rtx; 2257 case LT: 2258 case UNLT: 2259 return op0lt ? const_true_rtx : const0_rtx; 2260 case GT: 2261 case UNGT: 2262 return op1lt ? const_true_rtx : const0_rtx; 2263 case LTU: 2264 return op0ltu ? const_true_rtx : const0_rtx; 2265 case GTU: 2266 return op1ltu ? const_true_rtx : const0_rtx; 2267 case LE: 2268 case UNLE: 2269 return equal || op0lt ? const_true_rtx : const0_rtx; 2270 case GE: 2271 case UNGE: 2272 return equal || op1lt ? const_true_rtx : const0_rtx; 2273 case LEU: 2274 return equal || op0ltu ? const_true_rtx : const0_rtx; 2275 case GEU: 2276 return equal || op1ltu ? const_true_rtx : const0_rtx; 2277 case ORDERED: 2278 return const_true_rtx; 2279 case UNORDERED: 2280 return const0_rtx; 2281 default: 2282 abort (); 2283 } 2284} 2285 2286/* Simplify CODE, an operation with result mode MODE and three operands, 2287 OP0, OP1, and OP2. OP0_MODE was the mode of OP0 before it became 2288 a constant. Return 0 if no simplifications is possible. */ 2289 2290rtx 2291simplify_ternary_operation (code, mode, op0_mode, op0, op1, op2) 2292 enum rtx_code code; 2293 enum machine_mode mode, op0_mode; 2294 rtx op0, op1, op2; 2295{ 2296 unsigned int width = GET_MODE_BITSIZE (mode); 2297 2298 /* VOIDmode means "infinite" precision. 
*/ 2299 if (width == 0) 2300 width = HOST_BITS_PER_WIDE_INT; 2301 2302 switch (code) 2303 { 2304 case SIGN_EXTRACT: 2305 case ZERO_EXTRACT: 2306 if (GET_CODE (op0) == CONST_INT 2307 && GET_CODE (op1) == CONST_INT 2308 && GET_CODE (op2) == CONST_INT 2309 && ((unsigned) INTVAL (op1) + (unsigned) INTVAL (op2) <= width) 2310 && width <= (unsigned) HOST_BITS_PER_WIDE_INT) 2311 { 2312 /* Extracting a bit-field from a constant */ 2313 HOST_WIDE_INT val = INTVAL (op0); 2314 2315 if (BITS_BIG_ENDIAN) 2316 val >>= (GET_MODE_BITSIZE (op0_mode) 2317 - INTVAL (op2) - INTVAL (op1)); 2318 else 2319 val >>= INTVAL (op2); 2320 2321 if (HOST_BITS_PER_WIDE_INT != INTVAL (op1)) 2322 { 2323 /* First zero-extend. */ 2324 val &= ((HOST_WIDE_INT) 1 << INTVAL (op1)) - 1; 2325 /* If desired, propagate sign bit. */ 2326 if (code == SIGN_EXTRACT 2327 && (val & ((HOST_WIDE_INT) 1 << (INTVAL (op1) - 1)))) 2328 val |= ~ (((HOST_WIDE_INT) 1 << INTVAL (op1)) - 1); 2329 } 2330 2331 /* Clear the bits that don't belong in our mode, 2332 unless they and our sign bit are all one. 2333 So we get either a reasonable negative value or a reasonable 2334 unsigned value for this mode. */ 2335 if (width < HOST_BITS_PER_WIDE_INT 2336 && ((val & ((HOST_WIDE_INT) (-1) << (width - 1))) 2337 != ((HOST_WIDE_INT) (-1) << (width - 1)))) 2338 val &= ((HOST_WIDE_INT) 1 << width) - 1; 2339 2340 return GEN_INT (val); 2341 } 2342 break; 2343 2344 case IF_THEN_ELSE: 2345 if (GET_CODE (op0) == CONST_INT) 2346 return op0 != const0_rtx ? op1 : op2; 2347 2348 /* Convert a == b ? b : a to "a". */ 2349 if (GET_CODE (op0) == NE && ! side_effects_p (op0) 2350 && (! FLOAT_MODE_P (mode) || flag_unsafe_math_optimizations) 2351 && rtx_equal_p (XEXP (op0, 0), op1) 2352 && rtx_equal_p (XEXP (op0, 1), op2)) 2353 return op1; 2354 else if (GET_CODE (op0) == EQ && ! side_effects_p (op0) 2355 && (! 
FLOAT_MODE_P (mode) || flag_unsafe_math_optimizations) 2356 && rtx_equal_p (XEXP (op0, 1), op1) 2357 && rtx_equal_p (XEXP (op0, 0), op2)) 2358 return op2; 2359 else if (GET_RTX_CLASS (GET_CODE (op0)) == '<' && ! side_effects_p (op0)) 2360 { 2361 enum machine_mode cmp_mode = (GET_MODE (XEXP (op0, 0)) == VOIDmode 2362 ? GET_MODE (XEXP (op0, 1)) 2363 : GET_MODE (XEXP (op0, 0))); 2364 rtx temp; 2365 if (cmp_mode == VOIDmode) 2366 cmp_mode = op0_mode; 2367 temp = simplify_relational_operation (GET_CODE (op0), cmp_mode, 2368 XEXP (op0, 0), XEXP (op0, 1)); 2369 2370 /* See if any simplifications were possible. */ 2371 if (temp == const0_rtx) 2372 return op2; 2373 else if (temp == const1_rtx) 2374 return op1; 2375 else if (temp) 2376 op0 = temp; 2377 2378 /* Look for happy constants in op1 and op2. */ 2379 if (GET_CODE (op1) == CONST_INT && GET_CODE (op2) == CONST_INT) 2380 { 2381 HOST_WIDE_INT t = INTVAL (op1); 2382 HOST_WIDE_INT f = INTVAL (op2); 2383 2384 if (t == STORE_FLAG_VALUE && f == 0) 2385 code = GET_CODE (op0); 2386 else if (t == 0 && f == STORE_FLAG_VALUE) 2387 { 2388 enum rtx_code tmp; 2389 tmp = reversed_comparison_code (op0, NULL_RTX); 2390 if (tmp == UNKNOWN) 2391 break; 2392 code = tmp; 2393 } 2394 else 2395 break; 2396 2397 return gen_rtx_fmt_ee (code, mode, XEXP (op0, 0), XEXP (op0, 1)); 2398 } 2399 } 2400 break; 2401 2402 default: 2403 abort (); 2404 } 2405 2406 return 0; 2407} 2408 2409/* Simplify SUBREG:OUTERMODE(OP:INNERMODE, BYTE) 2410 Return 0 if no simplifications is possible. */ 2411rtx 2412simplify_subreg (outermode, op, innermode, byte) 2413 rtx op; 2414 unsigned int byte; 2415 enum machine_mode outermode, innermode; 2416{ 2417 /* Little bit of sanity checking. 
*/ 2418 if (innermode == VOIDmode || outermode == VOIDmode 2419 || innermode == BLKmode || outermode == BLKmode) 2420 abort (); 2421 2422 if (GET_MODE (op) != innermode 2423 && GET_MODE (op) != VOIDmode) 2424 abort (); 2425 2426 if (byte % GET_MODE_SIZE (outermode) 2427 || byte >= GET_MODE_SIZE (innermode)) 2428 abort (); 2429 2430 if (outermode == innermode && !byte) 2431 return op; 2432 2433 /* Attempt to simplify constant to non-SUBREG expression. */ 2434 if (CONSTANT_P (op)) 2435 { 2436 int offset, part; 2437 unsigned HOST_WIDE_INT val = 0; 2438 2439 /* ??? This code is partly redundant with code below, but can handle 2440 the subregs of floats and similar corner cases. 2441 Later it we should move all simplification code here and rewrite 2442 GEN_LOWPART_IF_POSSIBLE, GEN_HIGHPART, OPERAND_SUBWORD and friends 2443 using SIMPLIFY_SUBREG. */ 2444 if (subreg_lowpart_offset (outermode, innermode) == byte) 2445 { 2446 rtx new = gen_lowpart_if_possible (outermode, op); 2447 if (new) 2448 return new; 2449 } 2450 2451 /* Similar comment as above apply here. */ 2452 if (GET_MODE_SIZE (outermode) == UNITS_PER_WORD 2453 && GET_MODE_SIZE (innermode) > UNITS_PER_WORD 2454 && GET_MODE_CLASS (outermode) == MODE_INT) 2455 { 2456 rtx new = constant_subword (op, 2457 (byte / UNITS_PER_WORD), 2458 innermode); 2459 if (new) 2460 return new; 2461 } 2462 2463 offset = byte * BITS_PER_UNIT; 2464 switch (GET_CODE (op)) 2465 { 2466 case CONST_DOUBLE: 2467 if (GET_MODE (op) != VOIDmode) 2468 break; 2469 2470 /* We can't handle this case yet. */ 2471 if (GET_MODE_BITSIZE (outermode) >= HOST_BITS_PER_WIDE_INT) 2472 return NULL_RTX; 2473 2474 part = offset >= HOST_BITS_PER_WIDE_INT; 2475 if ((BITS_PER_WORD > HOST_BITS_PER_WIDE_INT 2476 && BYTES_BIG_ENDIAN) 2477 || (BITS_PER_WORD <= HOST_BITS_PER_WIDE_INT 2478 && WORDS_BIG_ENDIAN)) 2479 part = !part; 2480 val = part ? 
CONST_DOUBLE_HIGH (op) : CONST_DOUBLE_LOW (op); 2481 offset %= HOST_BITS_PER_WIDE_INT; 2482 2483 /* We've already picked the word we want from a double, so 2484 pretend this is actually an integer. */ 2485 innermode = mode_for_size (HOST_BITS_PER_WIDE_INT, MODE_INT, 0); 2486 2487 /* FALLTHROUGH */ 2488 case CONST_INT: 2489 if (GET_CODE (op) == CONST_INT) 2490 val = INTVAL (op); 2491 2492 /* We don't handle synthetizing of non-integral constants yet. */ 2493 if (GET_MODE_CLASS (outermode) != MODE_INT) 2494 return NULL_RTX; 2495 2496 if (BYTES_BIG_ENDIAN || WORDS_BIG_ENDIAN) 2497 { 2498 if (WORDS_BIG_ENDIAN) 2499 offset = (GET_MODE_BITSIZE (innermode) 2500 - GET_MODE_BITSIZE (outermode) - offset); 2501 if (BYTES_BIG_ENDIAN != WORDS_BIG_ENDIAN 2502 && GET_MODE_SIZE (outermode) < UNITS_PER_WORD) 2503 offset = (offset + BITS_PER_WORD - GET_MODE_BITSIZE (outermode) 2504 - 2 * (offset % BITS_PER_WORD)); 2505 } 2506 2507 if (offset >= HOST_BITS_PER_WIDE_INT) 2508 return ((HOST_WIDE_INT) val < 0) ? constm1_rtx : const0_rtx; 2509 else 2510 { 2511 val >>= offset; 2512 if (GET_MODE_BITSIZE (outermode) < HOST_BITS_PER_WIDE_INT) 2513 val = trunc_int_for_mode (val, outermode); 2514 return GEN_INT (val); 2515 } 2516 default: 2517 break; 2518 } 2519 } 2520 2521 /* Changing mode twice with SUBREG => just change it once, 2522 or not at all if changing back op starting mode. */ 2523 if (GET_CODE (op) == SUBREG) 2524 { 2525 enum machine_mode innermostmode = GET_MODE (SUBREG_REG (op)); 2526 int final_offset = byte + SUBREG_BYTE (op); 2527 rtx new; 2528 2529 if (outermode == innermostmode 2530 && byte == 0 && SUBREG_BYTE (op) == 0) 2531 return SUBREG_REG (op); 2532 2533 /* The SUBREG_BYTE represents offset, as if the value were stored 2534 in memory. Irritating exception is paradoxical subreg, where 2535 we define SUBREG_BYTE to be 0. On big endian machines, this 2536 value should be negative. For a moment, undo this exception. 
*/ 2537 if (byte == 0 && GET_MODE_SIZE (innermode) < GET_MODE_SIZE (outermode)) 2538 { 2539 int difference = (GET_MODE_SIZE (innermode) - GET_MODE_SIZE (outermode)); 2540 if (WORDS_BIG_ENDIAN) 2541 final_offset += (difference / UNITS_PER_WORD) * UNITS_PER_WORD; 2542 if (BYTES_BIG_ENDIAN) 2543 final_offset += difference % UNITS_PER_WORD; 2544 } 2545 if (SUBREG_BYTE (op) == 0 2546 && GET_MODE_SIZE (innermostmode) < GET_MODE_SIZE (innermode)) 2547 { 2548 int difference = (GET_MODE_SIZE (innermostmode) - GET_MODE_SIZE (innermode)); 2549 if (WORDS_BIG_ENDIAN) 2550 final_offset += (difference / UNITS_PER_WORD) * UNITS_PER_WORD; 2551 if (BYTES_BIG_ENDIAN) 2552 final_offset += difference % UNITS_PER_WORD; 2553 } 2554 2555 /* See whether resulting subreg will be paradoxical. */ 2556 if (GET_MODE_SIZE (innermostmode) > GET_MODE_SIZE (outermode)) 2557 { 2558 /* In nonparadoxical subregs we can't handle negative offsets. */ 2559 if (final_offset < 0) 2560 return NULL_RTX; 2561 /* Bail out in case resulting subreg would be incorrect. */ 2562 if (final_offset % GET_MODE_SIZE (outermode) 2563 || (unsigned) final_offset >= GET_MODE_SIZE (innermostmode)) 2564 return NULL_RTX; 2565 } 2566 else 2567 { 2568 int offset = 0; 2569 int difference = (GET_MODE_SIZE (innermostmode) - GET_MODE_SIZE (outermode)); 2570 2571 /* In paradoxical subreg, see if we are still looking on lower part. 2572 If so, our SUBREG_BYTE will be 0. */ 2573 if (WORDS_BIG_ENDIAN) 2574 offset += (difference / UNITS_PER_WORD) * UNITS_PER_WORD; 2575 if (BYTES_BIG_ENDIAN) 2576 offset += difference % UNITS_PER_WORD; 2577 if (offset == final_offset) 2578 final_offset = 0; 2579 else 2580 return NULL_RTX; 2581 } 2582 2583 /* Recurse for futher possible simplifications. 
*/ 2584 new = simplify_subreg (outermode, SUBREG_REG (op), 2585 GET_MODE (SUBREG_REG (op)), 2586 final_offset); 2587 if (new) 2588 return new; 2589 return gen_rtx_SUBREG (outermode, SUBREG_REG (op), final_offset); 2590 } 2591 2592 /* SUBREG of a hard register => just change the register number 2593 and/or mode. If the hard register is not valid in that mode, 2594 suppress this simplification. If the hard register is the stack, 2595 frame, or argument pointer, leave this as a SUBREG. */ 2596 2597 if (REG_P (op) 2598 && (! REG_FUNCTION_VALUE_P (op) 2599 || ! rtx_equal_function_value_matters) 2600#ifdef CLASS_CANNOT_CHANGE_MODE 2601 && ! (CLASS_CANNOT_CHANGE_MODE_P (outermode, innermode) 2602 && GET_MODE_CLASS (innermode) != MODE_COMPLEX_INT 2603 && GET_MODE_CLASS (innermode) != MODE_COMPLEX_FLOAT 2604 && (TEST_HARD_REG_BIT 2605 (reg_class_contents[(int) CLASS_CANNOT_CHANGE_MODE], 2606 REGNO (op)))) 2607#endif 2608 && REGNO (op) < FIRST_PSEUDO_REGISTER 2609 && ((reload_completed && !frame_pointer_needed) 2610 || (REGNO (op) != FRAME_POINTER_REGNUM 2611#if HARD_FRAME_POINTER_REGNUM != FRAME_POINTER_REGNUM 2612 && REGNO (op) != HARD_FRAME_POINTER_REGNUM 2613#endif 2614 )) 2615#if FRAME_POINTER_REGNUM != ARG_POINTER_REGNUM 2616 && REGNO (op) != ARG_POINTER_REGNUM 2617#endif 2618 && REGNO (op) != STACK_POINTER_REGNUM) 2619 { 2620 int final_regno = subreg_hard_regno (gen_rtx_SUBREG (outermode, op, byte), 2621 0); 2622 2623 /* ??? We do allow it if the current REG is not valid for 2624 its mode. This is a kludge to work around how float/complex 2625 arguments are passed on 32-bit Sparc and should be fixed. */ 2626 if (HARD_REGNO_MODE_OK (final_regno, outermode) 2627 || ! HARD_REGNO_MODE_OK (REGNO (op), innermode)) 2628 { 2629 rtx x = gen_rtx_REG (outermode, final_regno); 2630 2631 /* Propagate original regno. We don't have any way to specify 2632 the offset inside orignal regno, so do so only for lowpart. 
2633 The information is used only by alias analysis that can not 2634 grog partial register anyway. */ 2635 2636 if (subreg_lowpart_offset (outermode, innermode) == byte) 2637 ORIGINAL_REGNO (x) = ORIGINAL_REGNO (op); 2638 return x; 2639 } 2640 } 2641 2642 /* If we have a SUBREG of a register that we are replacing and we are 2643 replacing it with a MEM, make a new MEM and try replacing the 2644 SUBREG with it. Don't do this if the MEM has a mode-dependent address 2645 or if we would be widening it. */ 2646 2647 if (GET_CODE (op) == MEM 2648 && ! mode_dependent_address_p (XEXP (op, 0)) 2649 /* Allow splitting of volatile memory references in case we don't 2650 have instruction to move the whole thing. */ 2651 && (! MEM_VOLATILE_P (op) 2652 || ! have_insn_for (SET, innermode)) 2653 && GET_MODE_SIZE (outermode) <= GET_MODE_SIZE (GET_MODE (op))) 2654 return adjust_address_nv (op, outermode, byte); 2655 2656 /* Handle complex values represented as CONCAT 2657 of real and imaginary part. */ 2658 if (GET_CODE (op) == CONCAT) 2659 { 2660 int is_realpart = byte < GET_MODE_UNIT_SIZE (innermode); 2661 rtx part = is_realpart ? XEXP (op, 0) : XEXP (op, 1); 2662 unsigned int final_offset; 2663 rtx res; 2664 2665 final_offset = byte % (GET_MODE_UNIT_SIZE (innermode)); 2666 res = simplify_subreg (outermode, part, GET_MODE (part), final_offset); 2667 if (res) 2668 return res; 2669 /* We can at least simplify it by referring directly to the relevant part. */ 2670 return gen_rtx_SUBREG (outermode, part, final_offset); 2671 } 2672 2673 return NULL_RTX; 2674} 2675/* Make a SUBREG operation or equivalent if it folds. */ 2676 2677rtx 2678simplify_gen_subreg (outermode, op, innermode, byte) 2679 rtx op; 2680 unsigned int byte; 2681 enum machine_mode outermode, innermode; 2682{ 2683 rtx new; 2684 /* Little bit of sanity checking. 
 */
  if (innermode == VOIDmode || outermode == VOIDmode
      || innermode == BLKmode || outermode == BLKmode)
    abort ();

  if (GET_MODE (op) != innermode
      && GET_MODE (op) != VOIDmode)
    abort ();

  if (byte % GET_MODE_SIZE (outermode)
      || byte >= GET_MODE_SIZE (innermode))
    abort ();

  /* QUEUED expressions cannot usefully be wrapped in a SUBREG.  */
  if (GET_CODE (op) == QUEUED)
    return NULL_RTX;

  /* First try the full simplifier.  */
  new = simplify_subreg (outermode, op, innermode, byte);
  if (new)
    return new;

  /* Don't build a SUBREG of a SUBREG or of a mode-less constant;
     callers must handle those cases themselves.  */
  if (GET_CODE (op) == SUBREG || GET_MODE (op) == VOIDmode)
    return NULL_RTX;

  return gen_rtx_SUBREG (outermode, op, byte);
}

/* Simplify X, an rtx expression.

   Return the simplified expression or NULL if no simplifications
   were possible.

   This is the preferred entry point into the simplification routines;
   however, we still allow passes to call the more specific routines.

   Right now GCC has three (yes, three) major bodies of RTL simplification
   code that need to be unified.

	1. fold_rtx in cse.c.  This code uses various CSE specific
	   information to aid in RTL simplification.

	2. simplify_rtx in combine.c.  Similar to fold_rtx, except that
	   it uses combine specific information to aid in RTL
	   simplification.

	3. The routines in this file.


   Long term we want to only have one body of simplification code; to
   get to that state I recommend the following steps:

	1. Pore over fold_rtx & simplify_rtx and move any simplifications
	   which are not pass dependent state into these routines.

	2. As code is moved by #1, change fold_rtx & simplify_rtx to
	   use this routine whenever possible.

	3. Allow for pass dependent state to be provided to these
	   routines and add simplifications based on the pass dependent
	   state.  Remove code from cse.c & combine.c that becomes
	   redundant/dead.
2743 2744 It will take time, but ultimately the compiler will be easier to 2745 maintain and improve. It's totally silly that when we add a 2746 simplification that it needs to be added to 4 places (3 for RTL 2747 simplification and 1 for tree simplification. */ 2748 2749rtx 2750simplify_rtx (x) 2751 rtx x; 2752{ 2753 enum rtx_code code = GET_CODE (x); 2754 enum machine_mode mode = GET_MODE (x); 2755 2756 switch (GET_RTX_CLASS (code)) 2757 { 2758 case '1': 2759 return simplify_unary_operation (code, mode, 2760 XEXP (x, 0), GET_MODE (XEXP (x, 0))); 2761 case 'c': 2762 if (swap_commutative_operands_p (XEXP (x, 0), XEXP (x, 1))) 2763 { 2764 rtx tem; 2765 2766 tem = XEXP (x, 0); 2767 XEXP (x, 0) = XEXP (x, 1); 2768 XEXP (x, 1) = tem; 2769 return simplify_binary_operation (code, mode, 2770 XEXP (x, 0), XEXP (x, 1)); 2771 } 2772 2773 case '2': 2774 return simplify_binary_operation (code, mode, XEXP (x, 0), XEXP (x, 1)); 2775 2776 case '3': 2777 case 'b': 2778 return simplify_ternary_operation (code, mode, GET_MODE (XEXP (x, 0)), 2779 XEXP (x, 0), XEXP (x, 1), 2780 XEXP (x, 2)); 2781 2782 case '<': 2783 return simplify_relational_operation (code, 2784 ((GET_MODE (XEXP (x, 0)) 2785 != VOIDmode) 2786 ? GET_MODE (XEXP (x, 0)) 2787 : GET_MODE (XEXP (x, 1))), 2788 XEXP (x, 0), XEXP (x, 1)); 2789 case 'x': 2790 /* The only case we try to handle is a SUBREG. */ 2791 if (code == SUBREG) 2792 return simplify_gen_subreg (mode, SUBREG_REG (x), 2793 GET_MODE (SUBREG_REG (x)), 2794 SUBREG_BYTE (x)); 2795 return NULL; 2796 default: 2797 return NULL; 2798 } 2799} 2800