Lines Matching refs:dst_reg

8 /* ALU ops on registers, bpf_add|sub|...: dst_reg += src_reg */
13 .dst_reg = DST, \
21 .dst_reg = DST, \
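
The two .dst_reg = DST initializers above (file lines 13 and 21) appear to come from the register-source ALU macros in the kernel's include/linux/filter.h (presumably BPF_ALU64_REG and BPF_ALU32_REG); the macro bodies themselves are not part of this listing. A minimal sketch of such an instruction spelled out against the uapi struct bpf_insn from <linux/bpf.h>, with register choices that are illustrative only:

#include <linux/bpf.h>	/* struct bpf_insn, BPF_REG_*, BPF_ALU64, BPF_ADD, BPF_X */

/* r2 += r3: 64-bit ALU op, register source (BPF_X) */
static const struct bpf_insn alu64_add_reg = {
	.code    = BPF_ALU64 | BPF_ADD | BPF_X,
	.dst_reg = BPF_REG_2,
	.src_reg = BPF_REG_3,
	.off     = 0,
	.imm     = 0,
};
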
26 /* ALU ops on immediates, bpf_add|sub|...: dst_reg += imm32 */
31 .dst_reg = DST, \
39 .dst_reg = DST, \
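
The immediate-source counterpart (file lines 31 and 39, presumably BPF_ALU64_IMM/BPF_ALU32_IMM) carries the operand in the imm field and leaves src_reg clear. A hedged sketch, with register and constant chosen only for illustration:

#include <linux/bpf.h>

/* r1 += 42: 64-bit ALU op, immediate source (BPF_K) */
static const struct bpf_insn alu64_add_imm = {
	.code    = BPF_ALU64 | BPF_ADD | BPF_K,
	.dst_reg = BPF_REG_1,
	.src_reg = 0,
	.off     = 0,
	.imm     = 42,
};
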
44 /* Short form of mov, dst_reg = src_reg */
49 .dst_reg = DST, \
57 .dst_reg = DST, \
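
The register-to-register mov (file lines 49 and 57) is an ALU BPF_MOV with a BPF_X source. A sketch of the 64-bit form, registers illustrative:

#include <linux/bpf.h>

/* r6 = r1: dst_reg = src_reg (64-bit move) */
static const struct bpf_insn mov64_reg = {
	.code    = BPF_ALU64 | BPF_MOV | BPF_X,
	.dst_reg = BPF_REG_6,
	.src_reg = BPF_REG_1,
	.off     = 0,
	.imm     = 0,
};
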
62 /* Short form of mov, dst_reg = imm32 */
67 .dst_reg = DST, \
75 .dst_reg = DST, \
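
The immediate mov (file lines 67 and 75) loads a 32-bit constant into dst_reg; a common use is setting the r0 return value. A sketch under the same uapi assumptions:

#include <linux/bpf.h>

/* r0 = 0: dst_reg = imm32 (64-bit mov, immediate source) */
static const struct bpf_insn mov64_imm = {
	.code    = BPF_ALU64 | BPF_MOV | BPF_K,
	.dst_reg = BPF_REG_0,
	.src_reg = 0,
	.off     = 0,
	.imm     = 0,
};
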
87 .dst_reg = DST, \
93 .dst_reg = 0, \
112 .dst_reg = 0, \
117 /* Memory load, dst_reg = *(uint *) (src_reg + off16) */
122 .dst_reg = DST, \
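
For the memory load (file line 122) the pointer lives in src_reg and the result lands in dst_reg; the access width comes from the size bits of the opcode. A sketch loading a 32-bit word, register and offset illustrative:

#include <linux/bpf.h>

/* r0 = *(u32 *)(r1 + 16): dst_reg = *(uint *)(src_reg + off16) */
static const struct bpf_insn ldx_word = {
	.code    = BPF_LDX | BPF_MEM | BPF_W,
	.dst_reg = BPF_REG_0,
	.src_reg = BPF_REG_1,
	.off     = 16,
	.imm     = 0,
};
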
127 /* Memory store, *(uint *) (dst_reg + off16) = src_reg */
132 .dst_reg = DST, \
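
The register store (file line 132) inverts the roles: dst_reg holds the pointer and src_reg the value. A sketch spilling r0 to the stack through the frame pointer r10, offset illustrative:

#include <linux/bpf.h>

/* *(u64 *)(r10 - 8) = r0: *(uint *)(dst_reg + off16) = src_reg */
static const struct bpf_insn stx_dword = {
	.code    = BPF_STX | BPF_MEM | BPF_DW,
	.dst_reg = BPF_REG_10,
	.src_reg = BPF_REG_0,
	.off     = -8,
	.imm     = 0,
};
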
140 * BPF_ADD                  *(uint *) (dst_reg + off16) += src_reg
141 * BPF_AND                  *(uint *) (dst_reg + off16) &= src_reg
142 * BPF_OR                   *(uint *) (dst_reg + off16) |= src_reg
143 * BPF_XOR                  *(uint *) (dst_reg + off16) ^= src_reg
144 * BPF_ADD | BPF_FETCH      src_reg = atomic_fetch_add(dst_reg + off16, src_reg);
145 * BPF_AND | BPF_FETCH      src_reg = atomic_fetch_and(dst_reg + off16, src_reg);
146 * BPF_OR | BPF_FETCH       src_reg = atomic_fetch_or(dst_reg + off16, src_reg);
147 * BPF_XOR | BPF_FETCH      src_reg = atomic_fetch_xor(dst_reg + off16, src_reg);
148 * BPF_XCHG                 src_reg = atomic_xchg(dst_reg + off16, src_reg)
149 * BPF_CMPXCHG              r0 = atomic_cmpxchg(dst_reg + off16, r0, src_reg)
155 .dst_reg = DST, \
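
Per the comment block above, atomic operations reuse the BPF_STX class with the operation carried in the imm field. On kernels whose uapi header defines BPF_ATOMIC (older headers only spell the class bit BPF_XADD), a fetch-and-add can be sketched as follows; dst_reg supplies the address and src_reg both supplies the addend and receives the old value, matching the listed semantics. Registers are illustrative:

#include <linux/bpf.h>

/* r2 = atomic_fetch_add((u64 *)(r3 + 0), r2) */
static const struct bpf_insn atomic_fetch_add64 = {
	.code    = BPF_STX | BPF_ATOMIC | BPF_DW,
	.dst_reg = BPF_REG_3,		/* address operand: dst_reg + off16 */
	.src_reg = BPF_REG_2,		/* addend in, old value out */
	.off     = 0,
	.imm     = BPF_ADD | BPF_FETCH,	/* which atomic op to perform */
};
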
163 /* Memory store, *(uint *) (dst_reg + off16) = imm32 */
168 .dst_reg = DST, \
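
The immediate store (file line 168) writes imm32 through dst_reg with no source register. Sketch, stack slot and value illustrative:

#include <linux/bpf.h>

/* *(u32 *)(r10 - 4) = 1: *(uint *)(dst_reg + off16) = imm32 */
static const struct bpf_insn st_imm = {
	.code    = BPF_ST | BPF_MEM | BPF_W,
	.dst_reg = BPF_REG_10,
	.src_reg = 0,
	.off     = -4,
	.imm     = 1,
};
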
173 /* Conditional jumps against registers, if (dst_reg 'op' src_reg) goto pc + off16 */
178 .dst_reg = DST, \
188 .dst_reg = DST, \
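
A conditional jump against a register (file lines 178 and 188) compares dst_reg with src_reg and branches by off16 instructions. Sketch with an unsigned greater-than test; registers and branch distance are illustrative:

#include <linux/bpf.h>

/* if (r1 > r2) goto pc + 3 */
static const struct bpf_insn jgt_reg = {
	.code    = BPF_JMP | BPF_JGT | BPF_X,
	.dst_reg = BPF_REG_1,
	.src_reg = BPF_REG_2,
	.off     = 3,
	.imm     = 0,
};
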
193 /* Conditional jumps against immediates, if (dst_reg 'op' imm32) goto pc + off16 */
198 .dst_reg = DST, \
208 .dst_reg = DST, \
218 .dst_reg = DST, \
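
The immediate-comparison form tests dst_reg against imm32; a typical use is the NULL check on r0 after a map lookup. Sketch, branch distance illustrative:

#include <linux/bpf.h>

/* if (r0 == 0) goto pc + 2 */
static const struct bpf_insn jeq_imm = {
	.code    = BPF_JMP | BPF_JEQ | BPF_K,
	.dst_reg = BPF_REG_0,
	.src_reg = 0,
	.off     = 2,
	.imm     = 0,
};
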
228 .dst_reg = 0, \