Lines matching defs:mm2 in /netgear-R7000-V1.0.7.12_1.2.5/ap/gpl/amule/wxWidgets-2.8.12/src/png/

560 movq mm2,mask2
564 pand mm2,mm7
568 pcmpeqb mm2,mm6
595 pand mm6,mm2
596 movq mm4,mm2
696 movq mm2,mask2
701 pand mm2,mm7
706 pcmpeqb mm2,mm6
734 pand mm6,mm2
735 movq mm4,mm2
841 movq mm2,mask2
848 pand mm2,mm7
855 pcmpeqb mm2,mm6
883 pand mm6,mm2
884 movq mm7,mm2
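
The mask2/pand/pcmpeqb lines above look like the masked row-combining path: pcmpeqb turns a per-byte comparison into an all-ones-or-all-zeros byte mask, which then selects between new and existing row bytes. A minimal scalar sketch of that select idiom, assuming that reading; the names (blend_row, src, dst, mask) are illustrative, not from the source:

    #include <stdint.h>

    /* Byte-wise select: where m is 0xFF take src, where m is 0x00 keep
     * dst. pcmpeqb produces exactly this kind of 0xFF/0x00 per-byte mask,
     * and pand/pandn/por apply it eight bytes at a time. */
    static void blend_row(uint8_t *dst, const uint8_t *src,
                          const uint8_t *mask, int n)
    {
        for (int i = 0; i < n; i++) {
            uint8_t m = mask[i] ? 0xFF : 0x00;
            dst[i] = (uint8_t)((src[i] & m) | (dst[i] & (uint8_t)~m));
        }
    }

The point of the mask form is that the blend is branch-free, which is what makes it vectorizable in the first place.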
1239 movq mm2, mm0 ; 0 0 0 v2 v1 v0 0 0
1242 por mm0, mm2 ; v2 v1 v0 v2 v1 v0 0 0
1274 movq mm2, mm0 ; 0 0 0 v2 v1 v0 0 0
1277 por mm0, mm2 ; v2 v1 v0 v2 v1 v0 0 0
1364 movq mm2, mm0 ; v0 v0 v1 v1 v2 v2 v3 v3
1370 punpckhwd mm2, mm2 ; v0 v0 v0 v0 v1 v1 v1 v1
1372 movq mm4, mm2 ; v0 v0 v0 v0 v1 v1 v1 v1
1373 punpckldq mm2, mm2 ; v1 v1 v1 v1 v1 v1 v1 v1
1375 movq [edi+16], mm2 ; move to memory v1
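
The punpckhwd/punpckldq sequence above replicates pixels, apparently for interlace-pass expansion: starting from bytes already doubled (v0 v0 v1 v1 v2 v2 v3 v3), two more unpacks leave eight copies of a single pixel in a register, which is then stored. A scalar sketch of the net effect, assuming 8-bit pixels and 8x expansion (the function name is illustrative):

    #include <stdint.h>

    /* Write each source byte eight times, as the unpack sequence does
     * when widening one interlace-pass pixel to eight output pixels. */
    static void replicate_x8(uint8_t *dst, const uint8_t *src, int npix)
    {
        for (int i = 0; i < npix; i++)
            for (int j = 0; j < 8; j++)
                dst[8 * i + j] = src[i];
    }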
2007 movq mm2, [edi + ebx - 8] /* Load previous aligned 8 bytes */
2013 psrlq mm2, ShiftRem /* Correct position Raw(x-bpp) data */
2022 pand mm1, mm2 /* get LBCarrys for each byte where both lsb's were == 1 */
2024 psrlq mm2, 1 /* divide raw bytes by 2 */
2025 pand mm2, mm4 /* clear invalid bit 7 of each byte */
2026 paddb mm2, mm1 /* add LBCarrys to (Raw(x-bpp)/2) for each byte */
2027 pand mm2, mm6 /* Leave only Active Group 1 bytes to add to Avg */
2028 paddb mm0, mm2 /* add (Raw/2) + LBCarrys to Avg for each Active byte */
2032 movq mm2, mm0 /* mov updated Raws to mm2 */
2033 psllq mm2, ShiftBpp /* shift data to position correctly */
2035 pand mm1, mm2 /* get LBCarrys for each byte where both lsb's were == 1 */
2037 psrlq mm2, 1 /* divide raw bytes by 2 */
2038 pand mm2, mm4 /* clear invalid bit 7 of each byte */
2039 paddb mm2, mm1 /* add LBCarrys to (Raw(x-bpp)/2) for each byte */
2040 pand mm2, mm6 /* Leave only Active Group 2 bytes to add to Avg */
2041 paddb mm0, mm2 /* add (Raw/2) + LBCarrys to Avg for each Active byte */
2047 movq mm2, mm0 /* mov updated Raws to mm2 */
2048 psllq mm2, ShiftBpp /* shift data to position correctly */
2052 pand mm1, mm2 /* get LBCarrys for each byte where both lsb's were == 1 */
2054 psrlq mm2, 1 /* divide raw bytes by 2 */
2055 pand mm2, mm4 /* clear invalid bit 7 of each byte */
2056 paddb mm2, mm1 /* add LBCarrys to (Raw(x-bpp)/2) for each byte */
2057 pand mm2, mm6 /* Leave only Active Group 2 bytes to add to Avg */
2059 paddb mm0, mm2 /* add (Raw/2) + LBCarrys to Avg for each Active byte */
2066 movq mm2, mm0 /* mov updated Raw(x) to mm2 */
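
The psrlq/pand/paddb pattern repeated above (and in the Avg blocks that follow) computes the PNG Avg filter's per-byte average without widening to 16 bits, using the identity floor((a+b)/2) = (a>>1) + (b>>1) + (a & b & 1). mm4 appears to hold the 0x7F mask that clears the bit shifted in from the neighboring byte, the "LBCarry" term restores the carry lost from the two low bits, and the Active Group masks (mm6, mm7) serialize the bytes whose Raw(x-bpp) input was itself just computed. A scalar reference, with illustrative names:

    #include <stdint.h>

    /* Byte-wise floor((a + b) / 2) without a 16-bit intermediate. */
    static uint8_t avg2(uint8_t a, uint8_t b)
    {
        uint8_t lbcarry = a & b & 1;   /* the pand mm1, mm2 step above */
        return (uint8_t)((a >> 1) + (b >> 1) + lbcarry);
    }

    /* PNG Avg defilter for one row: Raw(x) = Avg(x) +
     * floor((Raw(x-bpp) + Prior(x)) / 2), with Raw(x-bpp) = 0
     * for bytes before the first full pixel. */
    static void defilter_avg(uint8_t *row, const uint8_t *prior,
                             int rowbytes, int bpp)
    {
        for (int x = 0; x < rowbytes; x++) {
            uint8_t left = (x >= bpp) ? row[x - bpp] : 0;
            row[x] = (uint8_t)(row[x] + avg2(left, prior[x]));
        }
    }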
2094 movq mm2, [edi + ebx - 8] /* Load previous aligned 8 bytes */
2098 psrlq mm2, ShiftRem /* shift data to position correctly */
2108 pand mm1, mm2 /* get LBCarrys for each byte where both lsb's were == 1 */
2110 psrlq mm2, 1 /* divide raw bytes by 2 */
2111 pand mm2, mm4 /* clear invalid bit 7 of each byte */
2112 paddb mm2, mm1 /* add LBCarrys to (Raw(x-bpp)/2) for each byte */
2113 pand mm2, mm7 /* Leave only Active Group 1 bytes to add to Avg */
2114 paddb mm0, mm2 /* add (Raw/2) + LBCarrys to Avg for each Active byte */
2117 movq mm2, mm0 /* mov updated Raws to mm2 */
2118 psllq mm2, ShiftBpp /* shift data to position correctly */
2121 pand mm1, mm2 /* get LBCarrys for each byte where both lsb's were == 1 */
2123 psrlq mm2, 1 /* divide raw bytes by 2 */
2124 pand mm2, mm4 /* clear invalid bit 7 of each byte */
2125 paddb mm2, mm1 /* add LBCarrys to (Raw(x-bpp)/2) for each byte */
2126 pand mm2, mm6 /* Leave only Active Group 2 bytes to add to Avg */
2127 paddb mm0, mm2 /* add (Raw/2) + LBCarrys to Avg for each Active byte */
2133 movq mm2, mm0 /* mov updated Raws to mm2 */
2153 movq mm2, [edi + ebx - 8] /* Load previous aligned 8 bytes */
2157 psrlq mm2, ShiftRem /* shift data to position correctly [BUGFIX] */
2168 pand mm1, mm2 /* get LBCarrys for each byte where both lsb's were == 1 */
2170 psrlq mm2, 1 /* divide raw bytes by 2 */
2171 pand mm2, mm4 /* clear invalid bit 7 of each byte */
2172 paddb mm2, mm1 /* add LBCarrys to (Raw(x-bpp)/2) for each byte */
2173 pand mm2, mm6 /* Leave only Active Group 1 bytes to add to Avg */
2174 paddb mm0, mm2 /* add (Raw/2) + LBCarrys to Avg for each Active byte */
2177 movq mm2, mm0 /* mov updated Raws to mm2 */
2178 psllq mm2, ShiftBpp /* shift data to position correctly */
2180 pand mm1, mm2 /* get LBCarrys for each byte where both lsb's were == 1 */
2182 psrlq mm2, 1 /* divide raw bytes by 2 */
2183 pand mm2, mm4 /* clear invalid bit 7 of each byte */
2184 paddb mm2, mm1 /* add LBCarrys to (Raw(x-bpp)/2) for each byte */
2185 pand mm2, mm6 /* Leave only Active Group 2 bytes to add to Avg */
2186 paddb mm0, mm2 /* add (Raw/2) + LBCarrys to Avg for each Active byte */
2190 movq mm2, mm0 /* mov updated Raws to mm2 */
2191 psllq mm2, ShiftBpp /* shift data to position correctly */
2195 pand mm1, mm2 /* get LBCarrys for each byte where both lsb's were == 1 */
2197 psrlq mm2, 1 /* divide raw bytes by 2 */
2198 pand mm2, mm4 /* clear invalid bit 7 of each byte */
2199 paddb mm2, mm1 /* add LBCarrys to (Raw(x-bpp)/2) for each byte */
2200 pand mm2, mm6 /* Leave only Active Group 2 bytes to add to Avg */
2201 paddb mm0, mm2 /* add (Raw/2) + LBCarrys to Avg for each Active byte */
2205 movq mm2, mm0 /* mov updated Raws to mm2 */
2206 psllq mm2, ShiftBpp /* shift data to position correctly */
2211 pand mm1, mm2 /* get LBCarrys for each byte where both lsb's were == 1 */
2213 psrlq mm2, 1 /* divide raw bytes by 2 */
2214 pand mm2, mm4 /* clear invalid bit 7 of each byte */
2215 paddb mm2, mm1 /* add LBCarrys to (Raw(x-bpp)/2) for each byte */
2216 pand mm2, mm6 /* Leave only Active Group 2 bytes to add to Avg */
2217 paddb mm0, mm2 /* add (Raw/2) + LBCarrys to Avg for each Active byte */
2223 movq mm2, mm0 /* mov updated Raws to mm2 */
2270 movq mm2, [edi + ebx - 8] /* Load previous aligned 8 bytes */
2279 pand mm3, mm2 /* get LBCarrys for each byte where both lsb's were == 1 */
2281 psrlq mm2, 1 /* divide raw bytes by 2 */
2284 pand mm2, mm4 /* clear invalid bit 7 of each byte */
2286 paddb mm0, mm2 /* add (Raw/2) to Avg for each byte */
2289 movq mm2, mm0 /* reuse as Raw(x-bpp) */
2310 movq mm2, [edx + ebx]
2312 pand mm3, mm2 /* get LBCarrys for each byte where both lsb's were == 1 */
2314 psrlq mm2, 1 /* divide raw bytes by 2 */
2317 pand mm2, mm4 /* clear invalid bit 7 of each byte */
2320 paddb mm0, mm2 /* add (Raw/2) to Avg for each byte */
2491 movq mm2, [esi + ebx] /* load b=Prior(x) */
2494 punpcklbw mm2, mm0 /* Unpack Low bytes of b */
2497 movq mm4, mm2
2531 pand mm2, mm0
2535 paddw mm0, mm2
2546 movq mm2, mm3 /* load b=Prior(x) step 1 */
2552 psrlq mm2, ShiftBpp /* load b=Prior(x) step 2 */
2555 punpcklbw mm2, mm0 /* Unpack Low bytes of b */
2559 movq mm4, mm2
2590 pand mm2, mm0
2594 paddw mm0, mm2
2597 movq mm2, [esi + ebx] /* load b=Prior(x) */
2604 movq mm3, mm2 /* load c=Prior(x-bpp) step 1 */
2606 punpckhbw mm2, mm0 /* Unpack High bytes of b */
2609 movq mm4, mm2
2650 pand mm2, mm0
2655 paddw mm0, mm2
2701 movq mm2, [esi + ebx] /* load b=Prior(x) */
2702 punpcklbw mm2, mm0 /* Unpack Low bytes of b */
2706 movq mm4, mm2
2739 pand mm2, mm0
2743 paddw mm0, mm2
2755 movq mm2, [esi + ebx] /* load b=Prior(x) step 1 */
2757 movq mm6, mm2
2768 punpckhbw mm2, mm0 /* Unpack High bytes of b */
2771 movq mm4, mm2
2803 pand mm2, mm0
2807 paddw mm0, mm2
2843 movq mm2, [esi + ebx] /* load b=Prior(x) */
2844 punpcklbw mm2, mm0 /* Unpack Low bytes of b */
2846 movq mm4, mm2
2879 pand mm2, mm0
2883 paddw mm0, mm2
2894 movq mm2, mm3 /* load b=Prior(x) step 1 */
2900 punpckhbw mm2, mm0 /* Unpack High bytes of b */
2903 movq mm4, mm2
2935 pand mm2, mm0
2939 paddw mm0, mm2
2974 movq mm2, [esi + ebx] /* load b=Prior(x) */
2975 punpcklbw mm2, mm0 /* Unpack Low bytes of b */
2977 movq mm4, mm2
3010 pand mm2, mm0
3014 paddw mm0, mm2
3025 movq mm2, [esi + ebx] /* load b=Prior(x) */
3032 punpckhbw mm2, mm0 /* Unpack High bytes of b */
3035 movq mm4, mm2
3067 pand mm2, mm0
3071 paddw mm0, mm2
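
The punpcklbw/punpckhbw pairs above widen Prior(x) bytes to words so the Paeth distances pa, pb, pc can be computed and compared branch-free; the uncommented pand mm2, mm0 / paddw mm0, mm2 pairs appear to apply the resulting comparison masks. For reference, the scalar Paeth predictor these lines implement:

    #include <stdint.h>
    #include <stdlib.h>

    /* Paeth predictor: pick whichever of left (a), up (b), up-left (c)
     * is closest to the estimate p = a + b - c; ties go a, then b. */
    static uint8_t paeth(uint8_t a, uint8_t b, uint8_t c)
    {
        int p = a + b - c;
        int pa = abs(p - a), pb = abs(p - b), pc = abs(p - c);
        if (pa <= pb && pa <= pc) return a;
        if (pb <= pc) return b;
        return c;
    }

    /* PNG Paeth defilter: Raw(x) = Paeth(x) + predictor(Raw(x-bpp),
     * Prior(x), Prior(x-bpp)), out-of-range bytes treated as 0. */
    static void defilter_paeth(uint8_t *row, const uint8_t *prior,
                               int rowbytes, int bpp)
    {
        for (int x = 0; x < rowbytes; x++) {
            uint8_t a = (x >= bpp) ? row[x - bpp] : 0;
            uint8_t c = (x >= bpp) ? prior[x - bpp] : 0;
            row[x] = (uint8_t)(row[x] + paeth(a, prior[x], c));
        }
    }

The MMX version does the same comparisons in 16-bit lanes, which is why every byte of b is unpacked against a zeroed register first.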
3480 movq mm2, [edi+ebx+16] /* Load Sub(x) for 3rd 8 bytes */
3482 paddb mm2, mm1 /* Add Raw(x-bpp) */
3484 movq [edi+ebx+16], mm2 /* Write Raw(x) for 3rd 8 bytes */
3485 paddb mm3, mm2 /* Add new Raw(x) into Sub(x) of next 8 bytes */
3598 movq mm2, [edi+ebx+8] /* Load Sub(x) for 2nd 8 bytes */
3600 paddb mm2, mm3 /* Add Raw(x-bpp) */
3602 movq [edi+ebx+8], mm2 /* Write Raw(x) for 2nd 8 bytes */
3614 movq mm2, [edi+ebx+40] /* Load Sub(x) for 6th 8 bytes */
3616 paddb mm2, mm3 /* Add Raw(x-bpp) */
3618 movq [edi+ebx+40], mm2 /* Write Raw(x) for 6th 8 bytes */
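
The paddb chains above are the Sub defilter: each group of eight Sub(x) bytes has the preceding output bytes added (kept in a register, mm1/mm3 above, rather than reloaded from memory), and the result is written back in place. The scalar equivalent is one addition per byte:

    #include <stdint.h>

    /* PNG Sub defilter, in place: Raw(x) = Sub(x) + Raw(x-bpp).
     * For the 8-byte-per-pixel case the unrolled MMX loop above maps
     * this to one paddb of the previous quadword per group. */
    static void defilter_sub(uint8_t *row, int rowbytes, int bpp)
    {
        for (int x = bpp; x < rowbytes; x++)
            row[x] = (uint8_t)(row[x] + row[x - bpp]);
    }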