Lines matching refs: dst_reg

32 /* ALU ops on registers, bpf_add|sub|...: dst_reg += src_reg */
37 .dst_reg = DST, \
45 .dst_reg = DST, \
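
A minimal sketch of the register-form ALU helpers in use, assuming these .dst_reg = DST fragments belong to BPF_ALU64_REG()/BPF_ALU32_REG() as in the kernel's linux/filter.h (struct bpf_insn, BPF_REG_* and the BPF_ADD/BPF_MUL opcodes come from the uapi bpf headers):

    /* r0 += r1 (64-bit), then the 32-bit w0 *= w0 (upper bits zeroed) */
    struct bpf_insn alu_reg[] = {
            BPF_ALU64_REG(BPF_ADD, BPF_REG_0, BPF_REG_1),
            BPF_ALU32_REG(BPF_MUL, BPF_REG_0, BPF_REG_0),
    };
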
50 /* ALU ops on immediates, bpf_add|sub|...: dst_reg += imm32 */
55 .dst_reg = DST, \
63 .dst_reg = DST, \
73 .dst_reg = DST, \
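
The immediate forms take a 32-bit constant instead of a source register; a sketch under the same assumptions (BPF_ALU64_IMM()/BPF_ALU32_IMM()):

    /* r2 -= 4, then r2 &= 0xff */
    struct bpf_insn alu_imm[] = {
            BPF_ALU64_IMM(BPF_SUB, BPF_REG_2, 4),
            BPF_ALU64_IMM(BPF_AND, BPF_REG_2, 0xff),
    };
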
78 /* Short form of mov, dst_reg = src_reg */
83 .dst_reg = DST, \
91 .dst_reg = DST, \
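
Assuming this is BPF_MOV64_REG(DST, SRC), a common idiom is stashing the context pointer (r1 on program entry) in a callee-saved register:

    /* r6 = r1 */
    BPF_MOV64_REG(BPF_REG_6, BPF_REG_1),
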
96 /* Short form of mov, dst_reg = imm32 */
101 .dst_reg = DST, \
109 .dst_reg = DST, \
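
Assuming BPF_MOV64_IMM(DST, IMM), e.g. staging a return code in r0:

    /* r0 = 1 */
    BPF_MOV64_IMM(BPF_REG_0, 1),
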
114 /* Short form of mov based on type, BPF_X: dst_reg = src_reg, BPF_K: dst_reg = imm32 */
119 .dst_reg = DST, \
127 .dst_reg = DST, \
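
The raw form selects register vs. immediate source via its TYPE argument; a sketch assuming the BPF_MOV64_RAW(TYPE, DST, SRC, IMM) signature from the same header:

    /* BPF_X form: r3 = r4; BPF_K form: r3 = 42 */
    BPF_MOV64_RAW(BPF_X, BPF_REG_3, BPF_REG_4, 0),
    BPF_MOV64_RAW(BPF_K, BPF_REG_3, 0, 42),
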
137 .dst_reg = 0, \
147 .dst_reg = 0, \
152 /* Memory load, dst_reg = *(uint *) (src_reg + off16) */
157 .dst_reg = DST, \
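
Assuming BPF_LDX_MEM(SIZE, DST, SRC, OFF):

    /* r0 = *(u32 *)(r1 + 0); sub-64-bit loads zero-extend into the register */
    BPF_LDX_MEM(BPF_W, BPF_REG_0, BPF_REG_1, 0),
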
162 /* Memory store, *(uint *) (dst_reg + off16) = src_reg */
167 .dst_reg = DST, \
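
Assuming BPF_STX_MEM(SIZE, DST, SRC, OFF), e.g. spilling a register through the read-only frame pointer r10:

    /* *(u64 *)(r10 - 8) = r1 */
    BPF_STX_MEM(BPF_DW, BPF_REG_10, BPF_REG_1, -8),
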
175 * BPF_ADD                  *(uint *) (dst_reg + off16) += src_reg
176 * BPF_AND                  *(uint *) (dst_reg + off16) &= src_reg
177 * BPF_OR                   *(uint *) (dst_reg + off16) |= src_reg
178 * BPF_XOR                  *(uint *) (dst_reg + off16) ^= src_reg
179 * BPF_ADD | BPF_FETCH      src_reg = atomic_fetch_add(dst_reg + off16, src_reg);
180 * BPF_AND | BPF_FETCH      src_reg = atomic_fetch_and(dst_reg + off16, src_reg);
181 * BPF_OR | BPF_FETCH       src_reg = atomic_fetch_or(dst_reg + off16, src_reg);
182 * BPF_XOR | BPF_FETCH      src_reg = atomic_fetch_xor(dst_reg + off16, src_reg);
183 * BPF_XCHG                 src_reg = atomic_xchg(dst_reg + off16, src_reg)
184 * BPF_CMPXCHG              r0 = atomic_cmpxchg(dst_reg + off16, r0, src_reg)
190 .dst_reg = DST, \
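
Assuming the .dst_reg = DST line at 190 is BPF_ATOMIC_OP(SIZE, OP, DST, SRC, OFF), which encodes the operation table above (the op lands in the imm field), a sketch:

    /* lock *(u64 *)(r10 - 8) += r1 */
    BPF_ATOMIC_OP(BPF_DW, BPF_ADD, BPF_REG_10, BPF_REG_1, -8),
    /* r1 = atomic_fetch_add((u64 *)(r10 - 8), r1) */
    BPF_ATOMIC_OP(BPF_DW, BPF_ADD | BPF_FETCH, BPF_REG_10, BPF_REG_1, -8),
    /* r0 = atomic_cmpxchg((u64 *)(r10 - 8), r0, r1) */
    BPF_ATOMIC_OP(BPF_DW, BPF_CMPXCHG, BPF_REG_10, BPF_REG_1, -8),

Note the asymmetry documented above: the fetch variants and BPF_XCHG return the old value through src_reg, while BPF_CMPXCHG compares against and returns through r0.
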
198 /* Memory store, *(uint *) (dst_reg + off16) = imm32 */
203 .dst_reg = DST, \
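
Assuming BPF_ST_MEM(SIZE, DST, OFF, IMM), e.g. zero-initializing a stack slot:

    /* *(u64 *)(r10 - 16) = 0 */
    BPF_ST_MEM(BPF_DW, BPF_REG_10, -16, 0),
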
208 /* Conditional jumps against registers, if (dst_reg 'op' src_reg) goto pc + off16 */
213 .dst_reg = DST, \
223 .dst_reg = DST, \
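
Assuming BPF_JMP_REG(OP, DST, SRC, OFF), where off16 counts instructions, not bytes:

    /* if r1 > r2 goto +2 (skip the next two insns) */
    BPF_JMP_REG(BPF_JGT, BPF_REG_1, BPF_REG_2, 2),
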
228 /* Conditional jumps against immediates, if (dst_reg 'op' imm32) goto pc + off16 */
233 .dst_reg = DST, \
243 .dst_reg = DST, \
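
Assuming BPF_JMP_IMM(OP, DST, IMM, OFF), with BPF_JMP32_IMM() as the 32-bit-wide compare variant:

    /* if r0 == 0 goto +1 */
    BPF_JMP_IMM(BPF_JEQ, BPF_REG_0, 0, 1),
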
253 .dst_reg = 0, \
263 .dst_reg = 0, \
273 .dst_reg = DST, \
286 .dst_reg = DST, \
292 .dst_reg = 0, \
300 .dst_reg = DST, \
306 .dst_reg = 0, \
326 .dst_reg = 0, \
336 .dst_reg = 0, \
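
Tying the pieces together, a sketch of a complete hand-assembled program. BPF_EXIT_INSN() (return r0) is defined in the same header and is likely among the .dst_reg = 0 matches near the end of this list:

    #include <linux/filter.h>   /* in-kernel; userspace tools carry a copy in tools/include */

    /* return (r1 == 0) ? 1 : 0 */
    struct bpf_insn prog[] = {
            BPF_MOV64_IMM(BPF_REG_0, 0),
            BPF_JMP_IMM(BPF_JNE, BPF_REG_1, 0, 1),  /* if r1 != 0, skip next insn */
            BPF_MOV64_IMM(BPF_REG_0, 1),
            BPF_EXIT_INSN(),
    };
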