// PPCInstrAltivec.td -- revision 193630
//===- PPCInstrAltivec.td - The PowerPC Altivec Extension --*- tablegen -*-===//
//
//                     The LLVM Compiler Infrastructure
//
// This file is distributed under the University of Illinois Open Source
// License. See LICENSE.TXT for details.
//
//===----------------------------------------------------------------------===//
//
// This file describes the Altivec extension to the PowerPC instruction set.
//
//===----------------------------------------------------------------------===//

//===----------------------------------------------------------------------===//
// Altivec transformation functions and pattern fragments.
//

// vpku*um_shuffle - Match vector_shuffle masks that correspond to a
// VPKUHUM/VPKUWUM pack.  The boolean argument to the PPC:: predicate selects
// the two-input (false) or single-input "unary" (true) form; the unary forms
// match shuffle(X, undef, mask), which is what the DAG combiner produces for
// vpk*(X, X).
def vpkuhum_shuffle : PatFrag<(ops node:$lhs, node:$rhs),
                              (vector_shuffle node:$lhs, node:$rhs), [{
  return PPC::isVPKUHUMShuffleMask(cast<ShuffleVectorSDNode>(N), false);
}]>;
def vpkuwum_shuffle : PatFrag<(ops node:$lhs, node:$rhs),
                              (vector_shuffle node:$lhs, node:$rhs), [{
  return PPC::isVPKUWUMShuffleMask(cast<ShuffleVectorSDNode>(N), false);
}]>;
def vpkuhum_unary_shuffle : PatFrag<(ops node:$lhs, node:$rhs),
                                    (vector_shuffle node:$lhs, node:$rhs), [{
  return PPC::isVPKUHUMShuffleMask(cast<ShuffleVectorSDNode>(N), true);
}]>;
def vpkuwum_unary_shuffle : PatFrag<(ops node:$lhs, node:$rhs),
                                    (vector_shuffle node:$lhs, node:$rhs), [{
  return PPC::isVPKUWUMShuffleMask(cast<ShuffleVectorSDNode>(N), true);
}]>;


// vmrg{l,h}{b,h,w}_shuffle - Match vector_shuffle masks for the low/high
// merge instructions; the integer argument is the element size in bytes
// (1 = byte, 2 = halfword, 4 = word).
def vmrglb_shuffle : PatFrag<(ops node:$lhs, node:$rhs),
                             (vector_shuffle node:$lhs, node:$rhs), [{
  return PPC::isVMRGLShuffleMask(cast<ShuffleVectorSDNode>(N), 1, false);
}]>;
def vmrglh_shuffle : PatFrag<(ops node:$lhs, node:$rhs),
                             (vector_shuffle node:$lhs, node:$rhs), [{
  return PPC::isVMRGLShuffleMask(cast<ShuffleVectorSDNode>(N), 2, false);
}]>;
def vmrglw_shuffle : PatFrag<(ops node:$lhs, node:$rhs),
                             (vector_shuffle node:$lhs, node:$rhs), [{
  return PPC::isVMRGLShuffleMask(cast<ShuffleVectorSDNode>(N), 4, false);
}]>;
def vmrghb_shuffle : PatFrag<(ops node:$lhs, node:$rhs),
                             (vector_shuffle node:$lhs, node:$rhs), [{
  return PPC::isVMRGHShuffleMask(cast<ShuffleVectorSDNode>(N), 1, false);
}]>;
def vmrghh_shuffle : PatFrag<(ops node:$lhs, node:$rhs),
                             (vector_shuffle node:$lhs, node:$rhs), [{
  return PPC::isVMRGHShuffleMask(cast<ShuffleVectorSDNode>(N), 2, false);
}]>;
def vmrghw_shuffle : PatFrag<(ops node:$lhs, node:$rhs),
                             (vector_shuffle node:$lhs, node:$rhs), [{
  return PPC::isVMRGHShuffleMask(cast<ShuffleVectorSDNode>(N), 4, false);
}]>;


// Unary merge fragments: match shuffle(X, undef, mask), i.e. vmrg*(X, X).
def vmrglb_unary_shuffle : PatFrag<(ops node:$lhs, node:$rhs),
                                   (vector_shuffle node:$lhs, node:$rhs), [{
  return PPC::isVMRGLShuffleMask(cast<ShuffleVectorSDNode>(N), 1, true);
}]>;
def vmrglh_unary_shuffle : PatFrag<(ops node:$lhs, node:$rhs),
                                   (vector_shuffle node:$lhs, node:$rhs), [{
  return PPC::isVMRGLShuffleMask(cast<ShuffleVectorSDNode>(N), 2, true);
}]>;
def vmrglw_unary_shuffle : PatFrag<(ops node:$lhs, node:$rhs),
                                   (vector_shuffle node:$lhs, node:$rhs), [{
  return PPC::isVMRGLShuffleMask(cast<ShuffleVectorSDNode>(N), 4, true);
}]>;
def vmrghb_unary_shuffle : PatFrag<(ops node:$lhs, node:$rhs),
                                   (vector_shuffle node:$lhs, node:$rhs), [{
  return PPC::isVMRGHShuffleMask(cast<ShuffleVectorSDNode>(N), 1, true);
}]>;
def vmrghh_unary_shuffle : PatFrag<(ops node:$lhs, node:$rhs),
                                   (vector_shuffle node:$lhs, node:$rhs), [{
  return PPC::isVMRGHShuffleMask(cast<ShuffleVectorSDNode>(N), 2, true);
}]>;
def vmrghw_unary_shuffle : PatFrag<(ops node:$lhs, node:$rhs),
                                   (vector_shuffle node:$lhs, node:$rhs), [{
  return PPC::isVMRGHShuffleMask(cast<ShuffleVectorSDNode>(N), 4, true);
}]>;


// VSLDOI_get_imm - Extract the shift amount from a matched vsldoi shuffle
// mask and return it as an i32 immediate for the instruction's SH field.
def VSLDOI_get_imm : SDNodeXForm<vector_shuffle, [{
  return getI32Imm(PPC::isVSLDOIShuffleMask(N, false));
}]>;
def vsldoi_shuffle : PatFrag<(ops node:$lhs, node:$rhs),
                             (vector_shuffle node:$lhs, node:$rhs), [{
  return PPC::isVSLDOIShuffleMask(N, false) != -1;
}], VSLDOI_get_imm>;


/// VSLDOI_unary* - These are used to match vsldoi(X,X), which is turned into
/// vector_shuffle(X,undef,mask) by the dag combiner.
def VSLDOI_unary_get_imm : SDNodeXForm<vector_shuffle, [{
  return getI32Imm(PPC::isVSLDOIShuffleMask(N, true));
}]>;
def vsldoi_unary_shuffle : PatFrag<(ops node:$lhs, node:$rhs),
                                   (vector_shuffle node:$lhs, node:$rhs), [{
  return PPC::isVSLDOIShuffleMask(N, true) != -1;
}], VSLDOI_unary_get_imm>;


// VSPLT*_get_imm xform function: convert vector_shuffle mask to VSPLT* imm.
// VSPLTB_get_imm - Extract the splatted element index (bytes) from a splat
// shuffle mask; the integer argument to the PPC:: helpers is the element
// size in bytes.
def VSPLTB_get_imm : SDNodeXForm<vector_shuffle, [{
  return getI32Imm(PPC::getVSPLTImmediate(N, 1));
}]>;
def vspltb_shuffle : PatFrag<(ops node:$lhs, node:$rhs),
                             (vector_shuffle node:$lhs, node:$rhs), [{
  return PPC::isSplatShuffleMask(cast<ShuffleVectorSDNode>(N), 1);
}], VSPLTB_get_imm>;
def VSPLTH_get_imm : SDNodeXForm<vector_shuffle, [{
  return getI32Imm(PPC::getVSPLTImmediate(N, 2));
}]>;
def vsplth_shuffle : PatFrag<(ops node:$lhs, node:$rhs),
                             (vector_shuffle node:$lhs, node:$rhs), [{
  return PPC::isSplatShuffleMask(cast<ShuffleVectorSDNode>(N), 2);
}], VSPLTH_get_imm>;
def VSPLTW_get_imm : SDNodeXForm<vector_shuffle, [{
  return getI32Imm(PPC::getVSPLTImmediate(N, 4));
}]>;
def vspltw_shuffle : PatFrag<(ops node:$lhs, node:$rhs),
                             (vector_shuffle node:$lhs, node:$rhs), [{
  return PPC::isSplatShuffleMask(cast<ShuffleVectorSDNode>(N), 4);
}], VSPLTW_get_imm>;


// VSPLTISB_get_imm xform function: convert build_vector to VSPLTISB imm.
def VSPLTISB_get_imm : SDNodeXForm<build_vector, [{
  return PPC::get_VSPLTI_elt(N, 1, *CurDAG);
}]>;
def vecspltisb : PatLeaf<(build_vector), [{
  return PPC::get_VSPLTI_elt(N, 1, *CurDAG).getNode() != 0;
}], VSPLTISB_get_imm>;

// VSPLTISH_get_imm xform function: convert build_vector to VSPLTISH imm.
def VSPLTISH_get_imm : SDNodeXForm<build_vector, [{
  return PPC::get_VSPLTI_elt(N, 2, *CurDAG);
}]>;
def vecspltish : PatLeaf<(build_vector), [{
  return PPC::get_VSPLTI_elt(N, 2, *CurDAG).getNode() != 0;
}], VSPLTISH_get_imm>;

// VSPLTISW_get_imm xform function: convert build_vector to VSPLTISW imm.
def VSPLTISW_get_imm : SDNodeXForm<build_vector, [{
  return PPC::get_VSPLTI_elt(N, 4, *CurDAG);
}]>;
def vecspltisw : PatLeaf<(build_vector), [{
  return PPC::get_VSPLTI_elt(N, 4, *CurDAG).getNode() != 0;
}], VSPLTISW_get_imm>;

// V_immneg0 - A build_vector whose elements are all negative zero; used by
// the VNMSUBFP pattern below.
def V_immneg0 : PatLeaf<(build_vector), [{
  return PPC::isAllNegativeZeroVector(N);
}]>;

//===----------------------------------------------------------------------===//
// Helpers for defining instructions that directly correspond to intrinsics.

// VA1a_Int - A VAForm_1a intrinsic definition.
class VA1a_Int<bits<6> xo, string opc, Intrinsic IntID>
  : VAForm_1a<xo, (outs VRRC:$vD), (ins VRRC:$vA, VRRC:$vB, VRRC:$vC),
              !strconcat(opc, " $vD, $vA, $vB, $vC"), VecFP,
              [(set VRRC:$vD, (IntID VRRC:$vA, VRRC:$vB, VRRC:$vC))]>;

// VX1_Int - A VXForm_1 intrinsic definition.
class VX1_Int<bits<11> xo, string opc, Intrinsic IntID>
  : VXForm_1<xo, (outs VRRC:$vD), (ins VRRC:$vA, VRRC:$vB),
             !strconcat(opc, " $vD, $vA, $vB"), VecFP,
             [(set VRRC:$vD, (IntID VRRC:$vA, VRRC:$vB))]>;

// VX2_Int - A VXForm_2 intrinsic definition.
class VX2_Int<bits<11> xo, string opc, Intrinsic IntID>
  : VXForm_2<xo, (outs VRRC:$vD), (ins VRRC:$vB),
             !strconcat(opc, " $vD, $vB"), VecFP,
             [(set VRRC:$vD, (IntID VRRC:$vB))]>;

//===----------------------------------------------------------------------===//
// Instruction Definitions.

// Data stream touch/stop instructions.  The $ZERO*/$ONE operands are
// hard-wired field values supplied by the patterns further below; no
// selection patterns are attached here.
def DSS    : DSS_Form<822, (outs),
                      (ins u5imm:$ZERO0, u5imm:$STRM, u5imm:$ZERO1, u5imm:$ZERO2),
                      "dss $STRM", LdStGeneral /*FIXME*/, []>;
def DSSALL : DSS_Form<822, (outs),
                      (ins u5imm:$ONE, u5imm:$ZERO0, u5imm:$ZERO1, u5imm:$ZERO2),
                      "dssall", LdStGeneral /*FIXME*/, []>;
def DST    : DSS_Form<342, (outs),
                      (ins u5imm:$ZERO, u5imm:$STRM, GPRC:$rA, GPRC:$rB),
                      "dst $rA, $rB, $STRM", LdStGeneral /*FIXME*/, []>;
def DSTT   : DSS_Form<342, (outs),
                      (ins u5imm:$ONE, u5imm:$STRM, GPRC:$rA, GPRC:$rB),
                      "dstt $rA, $rB, $STRM", LdStGeneral /*FIXME*/, []>;
def DSTST  : DSS_Form<374, (outs),
                      (ins u5imm:$ZERO, u5imm:$STRM, GPRC:$rA, GPRC:$rB),
                      "dstst $rA, $rB, $STRM", LdStGeneral /*FIXME*/, []>;
def DSTSTT : DSS_Form<374, (outs),
                      (ins u5imm:$ONE, u5imm:$STRM, GPRC:$rA, GPRC:$rB),
                      "dststt $rA, $rB, $STRM", LdStGeneral /*FIXME*/, []>;

// 64-bit variants of the data stream instructions ($rA in a G8RC register).
def DST64    : DSS_Form<342, (outs),
                        (ins u5imm:$ZERO, u5imm:$STRM, G8RC:$rA, GPRC:$rB),
                        "dst $rA, $rB, $STRM", LdStGeneral /*FIXME*/, []>;
def DSTT64   : DSS_Form<342, (outs),
                        (ins u5imm:$ONE, u5imm:$STRM, G8RC:$rA, GPRC:$rB),
                        "dstt $rA, $rB, $STRM", LdStGeneral /*FIXME*/, []>;
def DSTST64  : DSS_Form<374, (outs),
                        (ins u5imm:$ZERO, u5imm:$STRM, G8RC:$rA, GPRC:$rB),
                        "dstst $rA, $rB, $STRM", LdStGeneral /*FIXME*/, []>;
def DSTSTT64 : DSS_Form<374, (outs),
                        (ins u5imm:$ONE, u5imm:$STRM, G8RC:$rA, GPRC:$rB),
                        "dststt $rA, $rB, $STRM", LdStGeneral /*FIXME*/, []>;

// Move from/to the vector status and control register.
def MFVSCR : VXForm_4<1540, (outs VRRC:$vD), (ins),
                      "mfvscr $vD", LdStGeneral,
                      [(set VRRC:$vD, (int_ppc_altivec_mfvscr))]>;
def MTVSCR : VXForm_5<1604, (outs), (ins VRRC:$vB),
                      "mtvscr $vB", LdStGeneral,
                      [(int_ppc_altivec_mtvscr VRRC:$vB)]>;

let canFoldAsLoad = 1, PPC970_Unit = 2 in {  // Loads.
def LVEBX: XForm_1<31,   7, (outs VRRC:$vD), (ins memrr:$src),
                   "lvebx $vD, $src", LdStGeneral,
                   [(set VRRC:$vD, (int_ppc_altivec_lvebx xoaddr:$src))]>;
def LVEHX: XForm_1<31,  39, (outs VRRC:$vD), (ins memrr:$src),
                   "lvehx $vD, $src", LdStGeneral,
                   [(set VRRC:$vD, (int_ppc_altivec_lvehx xoaddr:$src))]>;
def LVEWX: XForm_1<31,  71, (outs VRRC:$vD), (ins memrr:$src),
                   "lvewx $vD, $src", LdStGeneral,
                   [(set VRRC:$vD, (int_ppc_altivec_lvewx xoaddr:$src))]>;
def LVX  : XForm_1<31, 103, (outs VRRC:$vD), (ins memrr:$src),
                   "lvx $vD, $src", LdStGeneral,
                   [(set VRRC:$vD, (int_ppc_altivec_lvx xoaddr:$src))]>;
def LVXL : XForm_1<31, 359, (outs VRRC:$vD), (ins memrr:$src),
                   "lvxl $vD, $src", LdStGeneral,
                   [(set VRRC:$vD, (int_ppc_altivec_lvxl xoaddr:$src))]>;
}

// Load vector for shift left/right - produce the permute control vector
// used for unaligned loads.
def LVSL : XForm_1<31,   6, (outs VRRC:$vD), (ins memrr:$src),
                   "lvsl $vD, $src", LdStGeneral,
                   [(set VRRC:$vD, (int_ppc_altivec_lvsl xoaddr:$src))]>,
                   PPC970_Unit_LSU;
def LVSR : XForm_1<31,  38, (outs VRRC:$vD), (ins memrr:$src),
                   "lvsr $vD, $src", LdStGeneral,
                   [(set VRRC:$vD, (int_ppc_altivec_lvsr xoaddr:$src))]>,
                   PPC970_Unit_LSU;

let PPC970_Unit = 2 in {   // Stores.
def STVEBX: XForm_8<31, 135, (outs), (ins VRRC:$rS, memrr:$dst),
                    "stvebx $rS, $dst", LdStGeneral,
                    [(int_ppc_altivec_stvebx VRRC:$rS, xoaddr:$dst)]>;
def STVEHX: XForm_8<31, 167, (outs), (ins VRRC:$rS, memrr:$dst),
                    "stvehx $rS, $dst", LdStGeneral,
                    [(int_ppc_altivec_stvehx VRRC:$rS, xoaddr:$dst)]>;
def STVEWX: XForm_8<31, 199, (outs), (ins VRRC:$rS, memrr:$dst),
                    "stvewx $rS, $dst", LdStGeneral,
                    [(int_ppc_altivec_stvewx VRRC:$rS, xoaddr:$dst)]>;
def STVX  : XForm_8<31, 231, (outs), (ins VRRC:$rS, memrr:$dst),
                    "stvx $rS, $dst", LdStGeneral,
                    [(int_ppc_altivec_stvx VRRC:$rS, xoaddr:$dst)]>;
def STVXL : XForm_8<31, 487, (outs), (ins VRRC:$rS, memrr:$dst),
                    "stvxl $rS, $dst", LdStGeneral,
                    [(int_ppc_altivec_stvxl VRRC:$rS, xoaddr:$dst)]>;
}

let PPC970_Unit = 5 in {  // VALU Operations.
// VA-Form instructions.  3-input AltiVec ops.
// Fused multiply-add / negated multiply-subtract; only selected when FP
// contraction is permitted (Requires<[FPContractions]>).
def VMADDFP : VAForm_1<46, (outs VRRC:$vD), (ins VRRC:$vA, VRRC:$vC, VRRC:$vB),
                       "vmaddfp $vD, $vA, $vC, $vB", VecFP,
                       [(set VRRC:$vD, (fadd (fmul VRRC:$vA, VRRC:$vC),
                                             VRRC:$vB))]>,
                       Requires<[FPContractions]>;
def VNMSUBFP: VAForm_1<47, (outs VRRC:$vD), (ins VRRC:$vA, VRRC:$vC, VRRC:$vB),
                       "vnmsubfp $vD, $vA, $vC, $vB", VecFP,
                       [(set VRRC:$vD, (fsub V_immneg0,
                                             (fsub (fmul VRRC:$vA, VRRC:$vC),
                                                   VRRC:$vB)))]>,
                       Requires<[FPContractions]>;

def VMHADDSHS  : VA1a_Int<32, "vmhaddshs", int_ppc_altivec_vmhaddshs>;
def VMHRADDSHS : VA1a_Int<33, "vmhraddshs", int_ppc_altivec_vmhraddshs>;
def VMLADDUHM  : VA1a_Int<34, "vmladduhm", int_ppc_altivec_vmladduhm>;
def VPERM      : VA1a_Int<43, "vperm", int_ppc_altivec_vperm>;
def VSEL       : VA1a_Int<42, "vsel",  int_ppc_altivec_vsel>;

// Shuffles.
def VSLDOI  : VAForm_2<44, (outs VRRC:$vD), (ins VRRC:$vA, VRRC:$vB, u5imm:$SH),
                       "vsldoi $vD, $vA, $vB, $SH", VecFP,
                       [(set VRRC:$vD,
                         (vsldoi_shuffle:$SH (v16i8 VRRC:$vA), VRRC:$vB))]>;

// VX-Form instructions.  AltiVec arithmetic ops.
def VADDFP : VXForm_1<10, (outs VRRC:$vD), (ins VRRC:$vA, VRRC:$vB),
                      "vaddfp $vD, $vA, $vB", VecFP,
                      [(set VRRC:$vD, (fadd VRRC:$vA, VRRC:$vB))]>;

// Modulo (wrapping) integer adds; mapped directly to the generic 'add' node.
def VADDUBM : VXForm_1<0, (outs VRRC:$vD), (ins VRRC:$vA, VRRC:$vB),
                      "vaddubm $vD, $vA, $vB", VecGeneral,
                      [(set VRRC:$vD, (add (v16i8 VRRC:$vA), VRRC:$vB))]>;
def VADDUHM : VXForm_1<64, (outs VRRC:$vD), (ins VRRC:$vA, VRRC:$vB),
                      "vadduhm $vD, $vA, $vB", VecGeneral,
                      [(set VRRC:$vD, (add (v8i16 VRRC:$vA), VRRC:$vB))]>;
def VADDUWM : VXForm_1<128, (outs VRRC:$vD), (ins VRRC:$vA, VRRC:$vB),
                      "vadduwm $vD, $vA, $vB", VecGeneral,
                      [(set VRRC:$vD, (add (v4i32 VRRC:$vA), VRRC:$vB))]>;

// Carry-out and saturating adds; only reachable via intrinsics.
def VADDCUW : VX1_Int<384, "vaddcuw", int_ppc_altivec_vaddcuw>;
def VADDSBS : VX1_Int<768, "vaddsbs", int_ppc_altivec_vaddsbs>;
def VADDSHS : VX1_Int<832, "vaddshs", int_ppc_altivec_vaddshs>;
def VADDSWS : VX1_Int<896, "vaddsws", int_ppc_altivec_vaddsws>;
def VADDUBS : VX1_Int<512, "vaddubs", int_ppc_altivec_vaddubs>;
def VADDUHS : VX1_Int<576, "vadduhs", int_ppc_altivec_vadduhs>;
def VADDUWS : VX1_Int<640, "vadduws", int_ppc_altivec_vadduws>;


def VAND : VXForm_1<1028, (outs VRRC:$vD), (ins VRRC:$vA, VRRC:$vB),
                    "vand $vD, $vA, $vB", VecFP,
                    [(set VRRC:$vD, (and (v4i32 VRRC:$vA), VRRC:$vB))]>;
def VANDC : VXForm_1<1092, (outs VRRC:$vD), (ins VRRC:$vA, VRRC:$vB),
                     "vandc $vD, $vA, $vB", VecFP,
                     [(set VRRC:$vD, (and (v4i32 VRRC:$vA), (vnot VRRC:$vB)))]>;

// Fixed-point <-> floating-point conversions with scale factor $UIMM.
def VCFSX  : VXForm_1<842, (outs VRRC:$vD), (ins u5imm:$UIMM, VRRC:$vB),
                      "vcfsx $vD, $vB, $UIMM", VecFP,
                      [(set VRRC:$vD,
                             (int_ppc_altivec_vcfsx VRRC:$vB, imm:$UIMM))]>;
def VCFUX  : VXForm_1<778, (outs VRRC:$vD), (ins u5imm:$UIMM, VRRC:$vB),
                      "vcfux $vD, $vB, $UIMM", VecFP,
                      [(set VRRC:$vD,
                             (int_ppc_altivec_vcfux VRRC:$vB, imm:$UIMM))]>;
def VCTSXS : VXForm_1<970, (outs VRRC:$vD), (ins u5imm:$UIMM, VRRC:$vB),
                      "vctsxs $vD, $vB, $UIMM", VecFP,
                      [(set VRRC:$vD,
                             (int_ppc_altivec_vctsxs VRRC:$vB, imm:$UIMM))]>;
def VCTUXS : VXForm_1<906, (outs VRRC:$vD), (ins u5imm:$UIMM, VRRC:$vB),
                      "vctuxs $vD, $vB, $UIMM", VecFP,
                      [(set VRRC:$vD,
                             (int_ppc_altivec_vctuxs VRRC:$vB, imm:$UIMM))]>;
def VEXPTEFP : VX2_Int<394, "vexptefp", int_ppc_altivec_vexptefp>;
def VLOGEFP  : VX2_Int<458, "vlogefp",  int_ppc_altivec_vlogefp>;

def VAVGSB : VX1_Int<1282, "vavgsb", int_ppc_altivec_vavgsb>;
def VAVGSH : VX1_Int<1346, "vavgsh", int_ppc_altivec_vavgsh>;
def VAVGSW : VX1_Int<1410, "vavgsw", int_ppc_altivec_vavgsw>;
def VAVGUB : VX1_Int<1026, "vavgub", int_ppc_altivec_vavgub>;
def VAVGUH : VX1_Int<1090, "vavguh", int_ppc_altivec_vavguh>;
def VAVGUW : VX1_Int<1154, "vavguw", int_ppc_altivec_vavguw>;

def VMAXFP : VX1_Int<1034, "vmaxfp", int_ppc_altivec_vmaxfp>;
def VMAXSB : VX1_Int< 258, "vmaxsb", int_ppc_altivec_vmaxsb>;
def VMAXSH : VX1_Int< 322, "vmaxsh", int_ppc_altivec_vmaxsh>;
def VMAXSW : VX1_Int< 386, "vmaxsw", int_ppc_altivec_vmaxsw>;
def VMAXUB : VX1_Int<   2, "vmaxub", int_ppc_altivec_vmaxub>;
def VMAXUH : VX1_Int<  66, "vmaxuh", int_ppc_altivec_vmaxuh>;
def VMAXUW : VX1_Int< 130, "vmaxuw", int_ppc_altivec_vmaxuw>;
def VMINFP : VX1_Int<1098, "vminfp", int_ppc_altivec_vminfp>;
def VMINSB : VX1_Int< 770, "vminsb", int_ppc_altivec_vminsb>;
def VMINSH : VX1_Int< 834, "vminsh", int_ppc_altivec_vminsh>;
def VMINSW : VX1_Int< 898, "vminsw", int_ppc_altivec_vminsw>;
def VMINUB : VX1_Int< 514, "vminub", int_ppc_altivec_vminub>;
def VMINUH : VX1_Int< 578, "vminuh", int_ppc_altivec_vminuh>;
def VMINUW : VX1_Int< 642, "vminuw", int_ppc_altivec_vminuw>;

// Merge high/low; selected from the vmrg*_shuffle pattern fragments above.
def VMRGHB : VXForm_1< 12, (outs VRRC:$vD), (ins VRRC:$vA, VRRC:$vB),
                      "vmrghb $vD, $vA, $vB", VecFP,
                      [(set VRRC:$vD, (vmrghb_shuffle VRRC:$vA, VRRC:$vB))]>;
def VMRGHH : VXForm_1< 76, (outs VRRC:$vD), (ins VRRC:$vA, VRRC:$vB),
                      "vmrghh $vD, $vA, $vB", VecFP,
                      [(set VRRC:$vD, (vmrghh_shuffle VRRC:$vA, VRRC:$vB))]>;
def VMRGHW : VXForm_1<140, (outs VRRC:$vD), (ins VRRC:$vA, VRRC:$vB),
                      "vmrghw $vD, $vA, $vB", VecFP,
                      [(set VRRC:$vD, (vmrghw_shuffle VRRC:$vA, VRRC:$vB))]>;
def VMRGLB : VXForm_1<268, (outs VRRC:$vD), (ins VRRC:$vA, VRRC:$vB),
                      "vmrglb $vD, $vA, $vB", VecFP,
                      [(set VRRC:$vD, (vmrglb_shuffle VRRC:$vA, VRRC:$vB))]>;
def VMRGLH : VXForm_1<332, (outs VRRC:$vD), (ins VRRC:$vA, VRRC:$vB),
                      "vmrglh $vD, $vA, $vB", VecFP,
                      [(set VRRC:$vD, (vmrglh_shuffle VRRC:$vA, VRRC:$vB))]>;
def VMRGLW : VXForm_1<396, (outs VRRC:$vD), (ins VRRC:$vA, VRRC:$vB),
                      "vmrglw $vD, $vA, $vB", VecFP,
                      [(set VRRC:$vD, (vmrglw_shuffle VRRC:$vA, VRRC:$vB))]>;

def VMSUMMBM : VA1a_Int<37, "vmsummbm", int_ppc_altivec_vmsummbm>;
def VMSUMSHM : VA1a_Int<40, "vmsumshm", int_ppc_altivec_vmsumshm>;
def VMSUMSHS : VA1a_Int<41, "vmsumshs", int_ppc_altivec_vmsumshs>;
def VMSUMUBM : VA1a_Int<36, "vmsumubm", int_ppc_altivec_vmsumubm>;
def VMSUMUHM : VA1a_Int<38, "vmsumuhm", int_ppc_altivec_vmsumuhm>;
def VMSUMUHS : VA1a_Int<39, "vmsumuhs", int_ppc_altivec_vmsumuhs>;

// Even/odd halves multiplies; only reachable via intrinsics.
def VMULESB : VX1_Int<776, "vmulesb", int_ppc_altivec_vmulesb>;
def VMULESH : VX1_Int<840, "vmulesh", int_ppc_altivec_vmulesh>;
def VMULEUB : VX1_Int<520, "vmuleub", int_ppc_altivec_vmuleub>;
def VMULEUH : VX1_Int<584, "vmuleuh", int_ppc_altivec_vmuleuh>;
def VMULOSB : VX1_Int<264, "vmulosb", int_ppc_altivec_vmulosb>;
def VMULOSH : VX1_Int<328, "vmulosh", int_ppc_altivec_vmulosh>;
def VMULOUB : VX1_Int<  8, "vmuloub", int_ppc_altivec_vmuloub>;
def VMULOUH : VX1_Int< 72, "vmulouh", int_ppc_altivec_vmulouh>;

// FP estimate and round-to-integer operations.
def VREFP     : VX2_Int<266, "vrefp",     int_ppc_altivec_vrefp>;
def VRFIM     : VX2_Int<714, "vrfim",     int_ppc_altivec_vrfim>;
def VRFIN     : VX2_Int<522, "vrfin",     int_ppc_altivec_vrfin>;
def VRFIP     : VX2_Int<650, "vrfip",     int_ppc_altivec_vrfip>;
def VRFIZ     : VX2_Int<586, "vrfiz",     int_ppc_altivec_vrfiz>;
def VRSQRTEFP : VX2_Int<330, "vrsqrtefp", int_ppc_altivec_vrsqrtefp>;

def VSUBCUW : VX1_Int<74, "vsubcuw", int_ppc_altivec_vsubcuw>;

// Use the VecFP itinerary for consistency with VADDFP/VMADDFP (was
// VecGeneral, which misclassified an FP op for the PPC970 scheduler).
def VSUBFP  : VXForm_1<74, (outs VRRC:$vD), (ins VRRC:$vA, VRRC:$vB),
                      "vsubfp $vD, $vA, $vB", VecFP,
                      [(set VRRC:$vD, (fsub VRRC:$vA, VRRC:$vB))]>;

// Modulo (wrapping) integer subtracts; mapped to the generic 'sub' node.
def VSUBUBM : VXForm_1<1024, (outs VRRC:$vD), (ins VRRC:$vA, VRRC:$vB),
                      "vsububm $vD, $vA, $vB", VecGeneral,
                      [(set VRRC:$vD, (sub (v16i8 VRRC:$vA), VRRC:$vB))]>;
def VSUBUHM : VXForm_1<1088, (outs VRRC:$vD), (ins VRRC:$vA, VRRC:$vB),
                      "vsubuhm $vD, $vA, $vB", VecGeneral,
                      [(set VRRC:$vD, (sub (v8i16 VRRC:$vA), VRRC:$vB))]>;
def VSUBUWM : VXForm_1<1152, (outs VRRC:$vD), (ins VRRC:$vA, VRRC:$vB),
                      "vsubuwm $vD, $vA, $vB", VecGeneral,
                      [(set VRRC:$vD, (sub (v4i32 VRRC:$vA), VRRC:$vB))]>;

// Saturating subtracts and across-element sums.
def VSUBSBS : VX1_Int<1792, "vsubsbs" , int_ppc_altivec_vsubsbs>;
def VSUBSHS : VX1_Int<1856, "vsubshs" , int_ppc_altivec_vsubshs>;
def VSUBSWS : VX1_Int<1920, "vsubsws" , int_ppc_altivec_vsubsws>;
def VSUBUBS : VX1_Int<1536, "vsububs" , int_ppc_altivec_vsububs>;
def VSUBUHS : VX1_Int<1600, "vsubuhs" , int_ppc_altivec_vsubuhs>;
def VSUBUWS : VX1_Int<1664, "vsubuws" , int_ppc_altivec_vsubuws>;
def VSUMSWS : VX1_Int<1928, "vsumsws" , int_ppc_altivec_vsumsws>;
def VSUM2SWS: VX1_Int<1672, "vsum2sws", int_ppc_altivec_vsum2sws>;
// BUG FIX: vsum4sbs is XO=1800 per the AltiVec ISA; the previous value
// (1672) duplicated VSUM2SWS and encoded vsum4sbs as vsum2sws.
def VSUM4SBS: VX1_Int<1800, "vsum4sbs", int_ppc_altivec_vsum4sbs>;
def VSUM4SHS: VX1_Int<1608, "vsum4shs", int_ppc_altivec_vsum4shs>;
def VSUM4UBS: VX1_Int<1544, "vsum4ubs", int_ppc_altivec_vsum4ubs>;

def VNOR : VXForm_1<1284, (outs VRRC:$vD), (ins VRRC:$vA, VRRC:$vB),
                    "vnor $vD, $vA, $vB", VecFP,
                    [(set VRRC:$vD, (vnot (or (v4i32 VRRC:$vA), VRRC:$vB)))]>;
def VOR : VXForm_1<1156, (outs VRRC:$vD), (ins VRRC:$vA, VRRC:$vB),
                   "vor $vD, $vA, $vB", VecFP,
                   [(set VRRC:$vD, (or (v4i32 VRRC:$vA), VRRC:$vB))]>;
def VXOR : VXForm_1<1220, (outs VRRC:$vD), (ins VRRC:$vA, VRRC:$vB),
                    "vxor $vD, $vA, $vB", VecFP,
                    [(set VRRC:$vD, (xor (v4i32 VRRC:$vA), VRRC:$vB))]>;

// Rotates and shifts; only reachable via intrinsics.
def VRLB : VX1_Int<   4, "vrlb", int_ppc_altivec_vrlb>;
def VRLH : VX1_Int<  68, "vrlh", int_ppc_altivec_vrlh>;
def VRLW : VX1_Int< 132, "vrlw", int_ppc_altivec_vrlw>;

def VSL  : VX1_Int< 452, "vsl" , int_ppc_altivec_vsl >;
def VSLO : VX1_Int<1036, "vslo", int_ppc_altivec_vslo>;
def VSLB : VX1_Int< 260, "vslb", int_ppc_altivec_vslb>;
def VSLH : VX1_Int< 324, "vslh", int_ppc_altivec_vslh>;
def VSLW : VX1_Int< 388, "vslw", int_ppc_altivec_vslw>;

// Splat a single element across the register; $UIMM is the element index
// extracted by the VSPLT*_get_imm xforms above.
def VSPLTB : VXForm_1<524, (outs VRRC:$vD), (ins u5imm:$UIMM, VRRC:$vB),
                      "vspltb $vD, $vB, $UIMM", VecPerm,
                      [(set VRRC:$vD,
                        (vspltb_shuffle:$UIMM (v16i8 VRRC:$vB), (undef)))]>;
def VSPLTH : VXForm_1<588, (outs VRRC:$vD), (ins u5imm:$UIMM, VRRC:$vB),
                      "vsplth $vD, $vB, $UIMM", VecPerm,
                      [(set VRRC:$vD,
                        (vsplth_shuffle:$UIMM (v16i8 VRRC:$vB), (undef)))]>;
def VSPLTW : VXForm_1<652, (outs VRRC:$vD), (ins u5imm:$UIMM, VRRC:$vB),
                      "vspltw $vD, $vB, $UIMM", VecPerm,
                      [(set VRRC:$vD,
                        (vspltw_shuffle:$UIMM (v16i8 VRRC:$vB), (undef)))]>;

// Right shifts; only reachable via intrinsics.
def VSR   : VX1_Int< 708, "vsr"  , int_ppc_altivec_vsr>;
def VSRO  : VX1_Int<1100, "vsro" , int_ppc_altivec_vsro>;
def VSRAB : VX1_Int< 772, "vsrab", int_ppc_altivec_vsrab>;
def VSRAH : VX1_Int< 836, "vsrah", int_ppc_altivec_vsrah>;
def VSRAW : VX1_Int< 900, "vsraw", int_ppc_altivec_vsraw>;
def VSRB  : VX1_Int< 516, "vsrb" , int_ppc_altivec_vsrb>;
def VSRH  : VX1_Int< 580, "vsrh" , int_ppc_altivec_vsrh>;
def VSRW  : VX1_Int< 644, "vsrw" , int_ppc_altivec_vsrw>;


// Splat an immediate; matched via the vecspltis* build_vector leafs above.
def VSPLTISB : VXForm_3<780, (outs VRRC:$vD), (ins s5imm:$SIMM),
                       "vspltisb $vD, $SIMM", VecPerm,
                       [(set VRRC:$vD, (v16i8 vecspltisb:$SIMM))]>;
def VSPLTISH : VXForm_3<844, (outs VRRC:$vD), (ins s5imm:$SIMM),
                       "vspltish $vD, $SIMM", VecPerm,
                       [(set VRRC:$vD, (v8i16 vecspltish:$SIMM))]>;
def VSPLTISW : VXForm_3<908, (outs VRRC:$vD), (ins s5imm:$SIMM),
                       "vspltisw $vD, $SIMM", VecPerm,
                       [(set VRRC:$vD, (v4i32 vecspltisw:$SIMM))]>;

// Vector Pack.
def VPKPX   : VX1_Int<782, "vpkpx", int_ppc_altivec_vpkpx>;
def VPKSHSS : VX1_Int<398, "vpkshss", int_ppc_altivec_vpkshss>;
def VPKSHUS : VX1_Int<270, "vpkshus", int_ppc_altivec_vpkshus>;
def VPKSWSS : VX1_Int<462, "vpkswss", int_ppc_altivec_vpkswss>;
def VPKSWUS : VX1_Int<334, "vpkswus", int_ppc_altivec_vpkswus>;
def VPKUHUM : VXForm_1<14, (outs VRRC:$vD), (ins VRRC:$vA, VRRC:$vB),
                       "vpkuhum $vD, $vA, $vB", VecFP,
                       [(set VRRC:$vD,
                         (vpkuhum_shuffle (v16i8 VRRC:$vA), VRRC:$vB))]>;
def VPKUHUS : VX1_Int<142, "vpkuhus", int_ppc_altivec_vpkuhus>;
def VPKUWUM : VXForm_1<78, (outs VRRC:$vD), (ins VRRC:$vA, VRRC:$vB),
                       "vpkuwum $vD, $vA, $vB", VecFP,
                       [(set VRRC:$vD,
                         (vpkuwum_shuffle (v16i8 VRRC:$vA), VRRC:$vB))]>;
def VPKUWUS : VX1_Int<206, "vpkuwus", int_ppc_altivec_vpkuwus>;

// Vector Unpack.
def VUPKHPX : VX2_Int<846, "vupkhpx", int_ppc_altivec_vupkhpx>;
def VUPKHSB : VX2_Int<526, "vupkhsb", int_ppc_altivec_vupkhsb>;
def VUPKHSH : VX2_Int<590, "vupkhsh", int_ppc_altivec_vupkhsh>;
def VUPKLPX : VX2_Int<974, "vupklpx", int_ppc_altivec_vupklpx>;
def VUPKLSB : VX2_Int<654, "vupklsb", int_ppc_altivec_vupklsb>;
def VUPKLSH : VX2_Int<718, "vupklsh", int_ppc_altivec_vupklsh>;


// Altivec Comparisons.
516193323Sed 517193323Sedclass VCMP<bits<10> xo, string asmstr, ValueType Ty> 518193323Sed : VXRForm_1<xo, (outs VRRC:$vD), (ins VRRC:$vA, VRRC:$vB),asmstr,VecFPCompare, 519193323Sed [(set VRRC:$vD, (Ty (PPCvcmp VRRC:$vA, VRRC:$vB, xo)))]>; 520193323Sedclass VCMPo<bits<10> xo, string asmstr, ValueType Ty> 521193323Sed : VXRForm_1<xo, (outs VRRC:$vD), (ins VRRC:$vA, VRRC:$vB),asmstr,VecFPCompare, 522193323Sed [(set VRRC:$vD, (Ty (PPCvcmp_o VRRC:$vA, VRRC:$vB, xo)))]> { 523193323Sed let Defs = [CR6]; 524193323Sed let RC = 1; 525193323Sed} 526193323Sed 527193323Sed// f32 element comparisons.0 528193323Seddef VCMPBFP : VCMP <966, "vcmpbfp $vD, $vA, $vB" , v4f32>; 529193323Seddef VCMPBFPo : VCMPo<966, "vcmpbfp. $vD, $vA, $vB" , v4f32>; 530193323Seddef VCMPEQFP : VCMP <198, "vcmpeqfp $vD, $vA, $vB" , v4f32>; 531193323Seddef VCMPEQFPo : VCMPo<198, "vcmpeqfp. $vD, $vA, $vB", v4f32>; 532193323Seddef VCMPGEFP : VCMP <454, "vcmpgefp $vD, $vA, $vB" , v4f32>; 533193323Seddef VCMPGEFPo : VCMPo<454, "vcmpgefp. $vD, $vA, $vB", v4f32>; 534193323Seddef VCMPGTFP : VCMP <710, "vcmpgtfp $vD, $vA, $vB" , v4f32>; 535193323Seddef VCMPGTFPo : VCMPo<710, "vcmpgtfp. $vD, $vA, $vB", v4f32>; 536193323Sed 537193323Sed// i8 element comparisons. 538193323Seddef VCMPEQUB : VCMP < 6, "vcmpequb $vD, $vA, $vB" , v16i8>; 539193323Seddef VCMPEQUBo : VCMPo< 6, "vcmpequb. $vD, $vA, $vB", v16i8>; 540193323Seddef VCMPGTSB : VCMP <774, "vcmpgtsb $vD, $vA, $vB" , v16i8>; 541193323Seddef VCMPGTSBo : VCMPo<774, "vcmpgtsb. $vD, $vA, $vB", v16i8>; 542193323Seddef VCMPGTUB : VCMP <518, "vcmpgtub $vD, $vA, $vB" , v16i8>; 543193323Seddef VCMPGTUBo : VCMPo<518, "vcmpgtub. $vD, $vA, $vB", v16i8>; 544193323Sed 545193323Sed// i16 element comparisons. 546193323Seddef VCMPEQUH : VCMP < 70, "vcmpequh $vD, $vA, $vB" , v8i16>; 547193323Seddef VCMPEQUHo : VCMPo< 70, "vcmpequh. $vD, $vA, $vB", v8i16>; 548193323Seddef VCMPGTSH : VCMP <838, "vcmpgtsh $vD, $vA, $vB" , v8i16>; 549193323Seddef VCMPGTSHo : VCMPo<838, "vcmpgtsh. 
$vD, $vA, $vB", v8i16>; // tail of a VCMP def begun before this chunk
def VCMPGTUH  : VCMP <582, "vcmpgtuh $vD, $vA, $vB" , v8i16>;
def VCMPGTUHo : VCMPo<582, "vcmpgtuh. $vD, $vA, $vB", v8i16>;

// i32 element comparisons.
// NOTE(review): the "o" (dot-form) variants presumably also update CR6 —
// confirm against the VCMPo class definition earlier in this file.
def VCMPEQUW  : VCMP <134, "vcmpequw $vD, $vA, $vB" , v4i32>;
def VCMPEQUWo : VCMPo<134, "vcmpequw. $vD, $vA, $vB", v4i32>;
def VCMPGTSW  : VCMP <902, "vcmpgtsw $vD, $vA, $vB" , v4i32>;
def VCMPGTSWo : VCMPo<902, "vcmpgtsw. $vD, $vA, $vB", v4i32>;
def VCMPGTUW  : VCMP <646, "vcmpgtuw $vD, $vA, $vB" , v4i32>;
def VCMPGTUWo : VCMPo<646, "vcmpgtuw. $vD, $vA, $vB", v4i32>;

// Materialize an all-zeros vector by XOR'ing a register with itself.
def V_SET0 : VXForm_setzero<1220, (outs VRRC:$vD), (ins),
                       "vxor $vD, $vD, $vD", VecFP,
                       [(set VRRC:$vD, (v4i32 immAllZerosV))]>;
} // closes a 'let' block opened before this chunk

//===----------------------------------------------------------------------===//
// Additional Altivec Patterns
//

// DS* intrinsics (data-stream prefetch control).
def : Pat<(int_ppc_altivec_dssall), (DSSALL 1, 0, 0, 0)>;
def : Pat<(int_ppc_altivec_dss imm:$STRM), (DSS 0, imm:$STRM, 0, 0)>;

// * 32-bit
// First machine operand is the T bit: 0 for dst/dstst, 1 for the
// transient dstt/dststt variants.
def : Pat<(int_ppc_altivec_dst GPRC:$rA, GPRC:$rB, imm:$STRM),
          (DST 0, imm:$STRM, GPRC:$rA, GPRC:$rB)>;
def : Pat<(int_ppc_altivec_dstt GPRC:$rA, GPRC:$rB, imm:$STRM),
          (DSTT 1, imm:$STRM, GPRC:$rA, GPRC:$rB)>;
def : Pat<(int_ppc_altivec_dstst GPRC:$rA, GPRC:$rB, imm:$STRM),
          (DSTST 0, imm:$STRM, GPRC:$rA, GPRC:$rB)>;
def : Pat<(int_ppc_altivec_dststt GPRC:$rA, GPRC:$rB, imm:$STRM),
          (DSTSTT 1, imm:$STRM, GPRC:$rA, GPRC:$rB)>;

// * 64-bit
// Same intrinsics, but the address base register comes from the 64-bit
// register class (G8RC).
def : Pat<(int_ppc_altivec_dst G8RC:$rA, GPRC:$rB, imm:$STRM),
          (DST64 0, imm:$STRM, (i64 G8RC:$rA), GPRC:$rB)>;
def : Pat<(int_ppc_altivec_dstt G8RC:$rA, GPRC:$rB, imm:$STRM),
          (DSTT64 1, imm:$STRM, (i64 G8RC:$rA), GPRC:$rB)>;
def : Pat<(int_ppc_altivec_dstst G8RC:$rA, GPRC:$rB, imm:$STRM),
          (DSTST64 0, imm:$STRM, (i64 G8RC:$rA), GPRC:$rB)>;
def : Pat<(int_ppc_altivec_dststt G8RC:$rA, GPRC:$rB, imm:$STRM),
          (DSTSTT64 1, imm:$STRM, (i64 G8RC:$rA), GPRC:$rB)>;

// Loads.
def : Pat<(v4i32 (load xoaddr:$src)), (LVX xoaddr:$src)>;

// Stores.
def : Pat<(store (v4i32 VRRC:$rS), xoaddr:$dst),
          (STVX (v4i32 VRRC:$rS), xoaddr:$dst)>;

// Bit conversions: all vector types share the VRRC register class, so a
// bitcast between them is a no-op at the machine level.
def : Pat<(v16i8 (bitconvert (v8i16 VRRC:$src))), (v16i8 VRRC:$src)>;
def : Pat<(v16i8 (bitconvert (v4i32 VRRC:$src))), (v16i8 VRRC:$src)>;
def : Pat<(v16i8 (bitconvert (v4f32 VRRC:$src))), (v16i8 VRRC:$src)>;

def : Pat<(v8i16 (bitconvert (v16i8 VRRC:$src))), (v8i16 VRRC:$src)>;
def : Pat<(v8i16 (bitconvert (v4i32 VRRC:$src))), (v8i16 VRRC:$src)>;
def : Pat<(v8i16 (bitconvert (v4f32 VRRC:$src))), (v8i16 VRRC:$src)>;

def : Pat<(v4i32 (bitconvert (v16i8 VRRC:$src))), (v4i32 VRRC:$src)>;
def : Pat<(v4i32 (bitconvert (v8i16 VRRC:$src))), (v4i32 VRRC:$src)>;
def : Pat<(v4i32 (bitconvert (v4f32 VRRC:$src))), (v4i32 VRRC:$src)>;

def : Pat<(v4f32 (bitconvert (v16i8 VRRC:$src))), (v4f32 VRRC:$src)>;
def : Pat<(v4f32 (bitconvert (v8i16 VRRC:$src))), (v4f32 VRRC:$src)>;
def : Pat<(v4f32 (bitconvert (v4i32 VRRC:$src))), (v4f32 VRRC:$src)>;

// Shuffles.

// Match vsldoi(x,x), vpkuwum(x,x), vpkuhum(x,x): unary shuffles lower to
// the corresponding two-operand instruction with the input repeated.
def : Pat<(vsldoi_unary_shuffle:$in (v16i8 VRRC:$vA), undef),
          (VSLDOI VRRC:$vA, VRRC:$vA, (VSLDOI_unary_get_imm VRRC:$in))>;
def : Pat<(vpkuwum_unary_shuffle (v16i8 VRRC:$vA), undef),
          (VPKUWUM VRRC:$vA, VRRC:$vA)>;
def : Pat<(vpkuhum_unary_shuffle (v16i8 VRRC:$vA), undef),
          (VPKUHUM VRRC:$vA, VRRC:$vA)>;

// Match vmrg*(x,x)
def : Pat<(vmrglb_unary_shuffle (v16i8 VRRC:$vA), undef),
          (VMRGLB VRRC:$vA, VRRC:$vA)>;
def : Pat<(vmrglh_unary_shuffle (v16i8 VRRC:$vA), undef),
          (VMRGLH VRRC:$vA, VRRC:$vA)>;
def : Pat<(vmrglw_unary_shuffle (v16i8 VRRC:$vA), undef),
          (VMRGLW VRRC:$vA, VRRC:$vA)>;
def : Pat<(vmrghb_unary_shuffle (v16i8 VRRC:$vA), undef),
          (VMRGHB VRRC:$vA, VRRC:$vA)>;
def : Pat<(vmrghh_unary_shuffle (v16i8 VRRC:$vA), undef),
          (VMRGHH VRRC:$vA, VRRC:$vA)>;
def : Pat<(vmrghw_unary_shuffle (v16i8 VRRC:$vA), undef),
          (VMRGHW VRRC:$vA, VRRC:$vA)>;

// Logical Operations

// NOT is NOR of a register with itself.
def : Pat<(v4i32 (vnot VRRC:$vA)), (VNOR VRRC:$vA, VRRC:$vA)>;
def : Pat<(v4i32 (vnot_conv VRRC:$vA)), (VNOR VRRC:$vA, VRRC:$vA)>;

def : Pat<(v4i32 (vnot_conv (or VRRC:$A, VRRC:$B))),
          (VNOR VRRC:$A, VRRC:$B)>;
def : Pat<(v4i32 (and VRRC:$A, (vnot_conv VRRC:$B))),
          (VANDC VRRC:$A, VRRC:$B)>;

// AltiVec has no plain vector multiply; use multiply-add with a zero addend.
def : Pat<(fmul VRRC:$vA, VRRC:$vB),
          (VMADDFP VRRC:$vA, VRRC:$vB, (v4i32 (V_SET0)))>;

// Fused multiply add and multiply sub for packed float.  These are represented
// separately from the real instructions above, for operations that must have
// the additional precision, such as Newton-Raphson (used by divide, sqrt)
def : Pat<(PPCvmaddfp VRRC:$A, VRRC:$B, VRRC:$C),
          (VMADDFP VRRC:$A, VRRC:$B, VRRC:$C)>;
def : Pat<(PPCvnmsubfp VRRC:$A, VRRC:$B, VRRC:$C),
          (VNMSUBFP VRRC:$A, VRRC:$B, VRRC:$C)>;

def : Pat<(int_ppc_altivec_vmaddfp VRRC:$A, VRRC:$B, VRRC:$C),
          (VMADDFP VRRC:$A, VRRC:$B, VRRC:$C)>;
def : Pat<(int_ppc_altivec_vnmsubfp VRRC:$A, VRRC:$B, VRRC:$C),
          (VNMSUBFP VRRC:$A, VRRC:$B, VRRC:$C)>;

def : Pat<(PPCvperm (v16i8 VRRC:$vA), VRRC:$vB, VRRC:$vC),
          (VPERM VRRC:$vA, VRRC:$vB, VRRC:$vC)>;

// Vector shifts: per-element shift by the corresponding element of $vB.
def : Pat<(v16i8 (shl (v16i8 VRRC:$vA), (v16i8 VRRC:$vB))),
          (v16i8 (VSLB VRRC:$vA, VRRC:$vB))>;
def : Pat<(v8i16 (shl (v8i16 VRRC:$vA), (v8i16 VRRC:$vB))),
          (v8i16 (VSLH VRRC:$vA, VRRC:$vB))>;
def : Pat<(v4i32 (shl (v4i32 VRRC:$vA), (v4i32 VRRC:$vB))),
          (v4i32 (VSLW VRRC:$vA, VRRC:$vB))>;

def : Pat<(v16i8 (srl (v16i8 VRRC:$vA), (v16i8 VRRC:$vB))),
          (v16i8 (VSRB VRRC:$vA, VRRC:$vB))>;
def : Pat<(v8i16 (srl (v8i16 VRRC:$vA), (v8i16 VRRC:$vB))),
          (v8i16 (VSRH VRRC:$vA, VRRC:$vB))>;
def : Pat<(v4i32 (srl (v4i32 VRRC:$vA), (v4i32 VRRC:$vB))),
          (v4i32 (VSRW VRRC:$vA, VRRC:$vB))>;

def : Pat<(v16i8 (sra (v16i8 VRRC:$vA), (v16i8 VRRC:$vB))),
          (v16i8 (VSRAB VRRC:$vA, VRRC:$vB))>;
def : Pat<(v8i16 (sra (v8i16 VRRC:$vA), (v8i16 VRRC:$vB))),
          (v8i16 (VSRAH VRRC:$vA, VRRC:$vB))>;
def : Pat<(v4i32 (sra (v4i32 VRRC:$vA), (v4i32 VRRC:$vB))),
          (v4i32 (VSRAW VRRC:$vA, VRRC:$vB))>;