Searched refs: T7 (results 1 - 7 of 7), sorted by relevance

/linux-master/arch/x86/crypto/
aesni-intel_avx-x86_64.S
571 .macro CALC_AAD_HASH GHASH_MUL AAD AADLEN T1 T2 T3 T4 T5 T6 T7 T8
580 vpxor \T7, \T7, \T7
584 vmovdqu (%r10), \T7
585 vpshufb SHUF_MASK(%rip), \T7, \T7
586 vpxor \T7, \T8, \T8
593 vmovdqu \T8, \T7
597 vpxor \T7, \T
[all...]
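
The CALC_AAD_HASH macro above folds GCM's additional authenticated data into the GHASH accumulator before any ciphertext is processed: T7/T8 serve as scratch registers, the accumulator is zeroed, each 16-byte AAD block is loaded, byte-reversed through SHUF_MASK, XORed into the running hash, and the result is multiplied by the hash subkey via the GHASH_MUL macro that is passed in as the first parameter. A rough C sketch of that loop, assuming an illustrative be128 type, a load_be64() helper, and a caller-supplied multiply in place of GHASH_MUL (partial-final-block handling omitted):

    #include <stdint.h>
    #include <stddef.h>
    #include <string.h>

    typedef struct { uint64_t hi, lo; } be128;   /* 128-bit GHASH word */

    static uint64_t load_be64(const uint8_t *p)
    {
        uint64_t v = 0;
        for (int i = 0; i < 8; i++)
            v = (v << 8) | p[i];          /* big-endian load, cf. vpshufb SHUF_MASK */
        return v;
    }

    /* ghash_mul plays the role of the macro's GHASH_MUL parameter:
     * multiply the accumulator by the hash subkey h in GF(2^128). */
    static void calc_aad_hash(void (*ghash_mul)(be128 *acc, const be128 *h),
                              const be128 *h, const uint8_t *aad,
                              size_t aadlen, be128 *acc)
    {
        memset(acc, 0, sizeof(*acc));     /* zero the accumulator (vpxor) */
        while (aadlen >= 16) {
            acc->hi ^= load_be64(aad);    /* vmovdqu + vpxor into the hash */
            acc->lo ^= load_be64(aad + 8);
            ghash_mul(acc, h);            /* \GHASH_MUL */
            aad += 16;
            aadlen -= 16;
        }
        /* the real macro also handles a partial trailing AAD block */
    }
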
nh-sse2-x86_64.S
28 #define T7 %xmm15
57 pshufd $0x10, T3, T7
62 pmuludq T7, T3
nh-avx2-x86_64.S
34 #define T7 %ymm15
55 vpshufd $0x10, T3, T7
60 vpmuludq T7, T3, T3
/linux-master/arch/arm64/crypto/
nh-neon-core.S
33 T7 .req v15
53 mov T7.d[0], T3.d[1]
57 umlal PASS3_SUMS.2d, T3.2s, T7.2s
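
nh-sse2-x86_64.S, nh-avx2-x86_64.S and nh-neon-core.S are the SSE2, AVX2 and NEON versions of the NH hash used by NHPoly1305 (Adiantum); T7 is simply one of the temporary vector registers (%xmm15, %ymm15 and v15 in the three files). The pshufd/pmuludq and mov/umlal sequences implement NH's basic step: add 32-bit message words to 32-bit key words, multiply the sums pairwise into 64-bit products, and accumulate mod 2^64. A rough scalar sketch of a single NH pass, using the stride-of-two word pairing that suits the vector lanes (the real code runs four passes with offset keys and produces four 64-bit outputs):

    #include <stdint.h>
    #include <stddef.h>

    /* One NH pass: nwords is a multiple of 4, additions wrap mod 2^32,
     * products are full 32x32 -> 64-bit multiplies (pmuludq/umlal),
     * and the accumulation wraps mod 2^64. */
    static uint64_t nh_pass(const uint32_t *key, const uint32_t *msg,
                            size_t nwords)
    {
        uint64_t sum = 0;

        for (size_t i = 0; i < nwords; i += 4) {
            uint32_t a = msg[i + 0] + key[i + 0];
            uint32_t b = msg[i + 1] + key[i + 1];
            uint32_t c = msg[i + 2] + key[i + 2];
            uint32_t d = msg[i + 3] + key[i + 3];

            sum += (uint64_t)a * c;   /* pair words two apart, as the lanes do */
            sum += (uint64_t)b * d;
        }
        return sum;
    }
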
sha512-armv8.pl
463 my ($T0,$T1,$T2,$T3,$T4,$T5,$T6,$T7) = map("q$_",(4..7,16..19));
490 &mov (&Dscalar($T7),&Dhi(@X[3])); # X[14..15]
511 &ushr_32 ($T4,$T7,$sigma1[0]);
517 &sli_32 ($T4,$T7,32-$sigma1[0]);
520 &ushr_32 ($T5,$T7,$sigma1[2]);
523 &ushr_32 ($T3,$T7,$sigma1[1]);
529 &sli_u32 ($T3,$T7,32-$sigma1[1]);
546 &ushr_32 ($T7,@X[0],$sigma1[2]);
554 &eor_8 ($T7,$T7,
[all...]
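
In sha512-armv8.pl, T0..T7 are NEON q-registers (q4-q7, q16-q19) used by the vectorized message schedule; the ushr_32/sli_32 pairs on T7 build the rotate terms, and the bare ushr_32 the shift term, of SHA-2's small sigma1 function applied to X[14..15]. The 32-bit lane operations suggest this is the SHA-256 path of the OpenSSL-derived script (which can emit either SHA-256 or SHA-512 code). A scalar C sketch of that schedule step, assuming the standard SHA-256 constants:

    #include <stdint.h>

    static inline uint32_t rotr32(uint32_t x, unsigned n)
    {
        return (x >> n) | (x << (32 - n));
    }

    /* small sigma1: two rotates and a shift, XORed together; this is
     * the pattern the ushr/sli instructions on T7 reproduce lane-wise */
    static inline uint32_t small_sigma1(uint32_t x)
    {
        return rotr32(x, 17) ^ rotr32(x, 19) ^ (x >> 10);
    }

    /* one message-schedule step, with W[] kept as a 16-entry ring:
     * W[t] = sigma1(W[t-2]) + W[t-7] + sigma0(W[t-15]) + W[t-16] */
    static inline uint32_t sha256_schedule(const uint32_t W[16], unsigned t)
    {
        uint32_t w15 = W[(t - 15) & 15];
        uint32_t s0  = rotr32(w15, 7) ^ rotr32(w15, 18) ^ (w15 >> 3);

        return small_sigma1(W[(t - 2) & 15]) + W[(t - 7) & 15] +
               s0 + W[(t - 16) & 15];
    }
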
sm4-ce-gcm-core.S
54 r6, r7, m6, m7, T6, T7) \
66 pmull T7.1q, m6.1d, T6.1d; \
78 eor T6.16b, T6.16b, T7.16b; \
82 ext T7.16b, RZERO.16b, T6.16b, #8; \
90 eor r6.16b, r6.16b, T7.16b; \
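
Here T6 and T7 hold partial products of the pmull-based GHASH used by the SM4-GCM code: pmull computes 64x64 -> 128-bit carry-less products, eor folds the cross terms together, and ext with RZERO shifts the middle term across the 128-bit lanes while the full product is assembled before reduction. A scalar C sketch of that assembly step, with clmul64() standing in for pmull and the final reduction modulo the GHASH polynomial omitted:

    #include <stdint.h>

    typedef struct { uint64_t lo, hi; } u128;

    /* carry-less 64x64 -> 128-bit multiply (scalar stand-in for pmull) */
    static u128 clmul64(uint64_t a, uint64_t b)
    {
        u128 r = { 0, 0 };

        for (unsigned i = 0; i < 64; i++) {
            if ((b >> i) & 1) {
                r.lo ^= a << i;
                if (i)
                    r.hi ^= a >> (64 - i);
            }
        }
        return r;
    }

    /* schoolbook 128x128 carry-less multiply: four partial products,
     * with the combined cross term shifted up by 64 bits, which is the
     * role the eor/ext lines on T6/T7 play in the assembly */
    static void gf128_mul_noreduce(u128 a, u128 b, u128 out[2])
    {
        u128 lo  = clmul64(a.lo, b.lo);
        u128 hi  = clmul64(a.hi, b.hi);
        u128 m0  = clmul64(a.lo, b.hi);
        u128 m1  = clmul64(a.hi, b.lo);
        u128 mid = { m0.lo ^ m1.lo, m0.hi ^ m1.hi };

        out[0].lo = lo.lo;
        out[0].hi = lo.hi ^ mid.lo;   /* mid.lo lands in bits 64..127  */
        out[1].lo = hi.lo ^ mid.hi;   /* mid.hi lands in bits 128..191 */
        out[1].hi = hi.hi;
    }
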
/linux-master/crypto/
khazad.c
661 static const u64 T7[256] = {
762 const u64 *S = T7;
778 T7[(int)(K1 ) & 0xff] ^
794 T7[(int)S[(int)(K1 ) & 0xff] & 0xff];
820 T7[(int)(state ) & 0xff] ^
831 (T7[(int)(state ) & 0xff] & 0x00000000000000ffULL) ^
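
In khazad.c, T7 is the last of eight 256-entry u64 tables (T0..T7) that combine the Khazad S-box with the columns of its linear diffusion layer, so a full round collapses into eight byte-indexed lookups XORed with the 64-bit round key; the key-schedule lines shown apply the same tables to the key words, and the masked lookups (the ... & 0x00000000000000ffULL line) implement the final round, where only the S-box, not the diffusion layer, is applied. A sketch of one table-driven round, with the tables passed as a parameter to keep the example self-contained:

    #include <stdint.h>

    typedef uint64_t u64;

    /* one table-driven Khazad-style round: T points at eight 256-entry
     * tables (T[7] corresponding to khazad.c's T7); each byte of the
     * 64-bit state indexes "its" table and everything is XORed with
     * the round key */
    static u64 khazad_round(const u64 T[8][256], u64 state, u64 round_key)
    {
        return T[0][(int)(state >> 56) & 0xff] ^
               T[1][(int)(state >> 48) & 0xff] ^
               T[2][(int)(state >> 40) & 0xff] ^
               T[3][(int)(state >> 32) & 0xff] ^
               T[4][(int)(state >> 24) & 0xff] ^
               T[5][(int)(state >> 16) & 0xff] ^
               T[6][(int)(state >>  8) & 0xff] ^
               T[7][(int)(state      ) & 0xff] ^   /* low byte, as in the hit at line 820 */
               round_key;
    }
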

Completed in 252 milliseconds