Lines Matching defs:bt

3431 static inline void bt_init(struct backtrack_state *bt, u32 frame)
3433 bt->frame = frame;
3436 static inline void bt_reset(struct backtrack_state *bt)
3438 struct bpf_verifier_env *env = bt->env;
3440 memset(bt, 0, sizeof(*bt));
3441 bt->env = env;
3444 static inline u32 bt_empty(struct backtrack_state *bt)
3449 for (i = 0; i <= bt->frame; i++)
3450 mask |= bt->reg_masks[i] | bt->stack_masks[i];
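
The matched lines 3431-3450 only show fragments of bt_init(), bt_reset() and bt_empty() from the BPF verifier's precision-backtracking code (kernel/bpf/verifier.c). For orientation, a minimal sketch of the backing structure and these three helpers, reassembled from the fragments; the struct layout and the final return statement are inferred from the accessors in this listing rather than quoted from the source:

	struct backtrack_state {
		struct bpf_verifier_env *env;       /* back-pointer preserved across bt_reset() */
		u32 frame;                          /* deepest call frame being backtracked */
		u32 reg_masks[MAX_CALL_FRAMES];     /* one bit per register, per frame */
		u64 stack_masks[MAX_CALL_FRAMES];   /* one bit per 8-byte stack slot, per frame */
	};

	static inline void bt_init(struct backtrack_state *bt, u32 frame)
	{
		bt->frame = frame;
	}

	static inline void bt_reset(struct backtrack_state *bt)
	{
		struct bpf_verifier_env *env = bt->env;

		memset(bt, 0, sizeof(*bt));
		bt->env = env;                      /* keep the env pointer, clear all masks */
	}

	static inline u32 bt_empty(struct backtrack_state *bt)
	{
		u64 mask = 0;
		int i;

		for (i = 0; i <= bt->frame; i++)
			mask |= bt->reg_masks[i] | bt->stack_masks[i];

		return mask == 0;                   /* nothing is tracked in any frame */
	}
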
3455 static inline int bt_subprog_enter(struct backtrack_state *bt)
3457 if (bt->frame == MAX_CALL_FRAMES - 1) {
3458 verbose(bt->env, "BUG subprog enter from frame %d\n", bt->frame);
3462 bt->frame++;
3466 static inline int bt_subprog_exit(struct backtrack_state *bt)
3468 if (bt->frame == 0) {
3469 verbose(bt->env, "BUG subprog exit from frame 0\n");
3473 bt->frame--;
3477 static inline void bt_set_frame_reg(struct backtrack_state *bt, u32 frame, u32 reg)
3479 bt->reg_masks[frame] |= 1 << reg;
3482 static inline void bt_clear_frame_reg(struct backtrack_state *bt, u32 frame, u32 reg)
3484 bt->reg_masks[frame] &= ~(1 << reg);
3487 static inline void bt_set_reg(struct backtrack_state *bt, u32 reg)
3489 bt_set_frame_reg(bt, bt->frame, reg);
3492 static inline void bt_clear_reg(struct backtrack_state *bt, u32 reg)
3494 bt_clear_frame_reg(bt, bt->frame, reg);
3497 static inline void bt_set_frame_slot(struct backtrack_state *bt, u32 frame, u32 slot)
3499 bt->stack_masks[frame] |= 1ull << slot;
3502 static inline void bt_clear_frame_slot(struct backtrack_state *bt, u32 frame, u32 slot)
3504 bt->stack_masks[frame] &= ~(1ull << slot);
3507 static inline u32 bt_frame_reg_mask(struct backtrack_state *bt, u32 frame)
3509 return bt->reg_masks[frame];
3512 static inline u32 bt_reg_mask(struct backtrack_state *bt)
3514 return bt->reg_masks[bt->frame];
3517 static inline u64 bt_frame_stack_mask(struct backtrack_state *bt, u32 frame)
3519 return bt->stack_masks[frame];
3522 static inline u64 bt_stack_mask(struct backtrack_state *bt)
3524 return bt->stack_masks[bt->frame];
3527 static inline bool bt_is_reg_set(struct backtrack_state *bt, u32 reg)
3529 return bt->reg_masks[bt->frame] & (1 << reg);
3532 static inline bool bt_is_frame_slot_set(struct backtrack_state *bt, u32 frame, u32 slot)
3534 return bt->stack_masks[frame] & (1ull << slot);
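
Lines 3477-3534 show the per-frame setters, getters and predicates built on those masks: registers live in a 32-bit mask (1 << reg), stack slots in a 64-bit mask (1ull << slot), and the frame-relative variants simply delegate to the frame-indexed ones via bt->frame. As a purely hypothetical illustration (not part of verifier.c), a debug dump of the current frame could be written against only these accessors; the stack offset mapping follows the -(i + 1) * 8 convention used by fmt_stack_mask() further below:

	static void bt_dump_cur_frame(struct bpf_verifier_env *env, struct backtrack_state *bt)
	{
		u32 regs = bt_reg_mask(bt);     /* registers tracked in the current frame */
		u64 slots = bt_stack_mask(bt);  /* stack slots tracked in the current frame */
		int i;

		for (i = 0; i < BPF_REG_FP; i++)
			if (regs & (1 << i))
				verbose(env, "r%d ", i);
		for (i = 0; i < 64; i++)
			if (slots & (1ull << i))
				verbose(env, "fp%d ", -(i + 1) * 8);
	}
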
3588 struct bpf_jmp_history_entry *hist, struct backtrack_state *bt)
3606 fmt_reg_mask(env->tmp_str_buf, TMP_STR_BUF_LEN, bt_reg_mask(bt));
3608 bt->frame, env->tmp_str_buf);
3609 fmt_stack_mask(env->tmp_str_buf, TMP_STR_BUF_LEN, bt_stack_mask(bt));
3616 if (!bt_is_reg_set(bt, dreg))
3629 bt_clear_reg(bt, dreg);
3631 bt_set_reg(bt, sreg);
3639 bt_clear_reg(bt, dreg);
3648 bt_set_reg(bt, sreg);
3654 if (!bt_is_reg_set(bt, dreg))
3656 bt_clear_reg(bt, dreg);
3672 bt_set_frame_slot(bt, fr, spi);
3674 if (bt_is_reg_set(bt, dreg))
3685 if (!bt_is_frame_slot_set(bt, fr, spi))
3687 bt_clear_frame_slot(bt, fr, spi);
3689 bt_set_reg(bt, sreg);
3711 if (bt_reg_mask(bt) & BPF_REGMASK_ARGS) {
3712 verbose(env, "BUG regs %x\n", bt_reg_mask(bt));
3717 bt_clear_reg(bt, BPF_REG_0);
3726 if (bt_reg_mask(bt) & ~BPF_REGMASK_ARGS) {
3727 verbose(env, "BUG regs %x\n", bt_reg_mask(bt));
3734 if (bt_stack_mask(bt) != 0) {
3735 verbose(env, "BUG stack slots %llx\n", bt_stack_mask(bt));
3741 if (bt_is_reg_set(bt, i)) {
3742 bt_clear_reg(bt, i);
3743 bt_set_frame_reg(bt, bt->frame - 1, i);
3746 if (bt_subprog_exit(bt))
3758 if (bt_reg_mask(bt) & ~BPF_REGMASK_ARGS) {
3759 verbose(env, "BUG regs %x\n", bt_reg_mask(bt));
3763 if (bt_stack_mask(bt) != 0) {
3764 verbose(env, "BUG stack slots %llx\n", bt_stack_mask(bt));
3770 bt_clear_reg(bt, i);
3771 if (bt_subprog_exit(bt))
3782 bt_clear_reg(bt, BPF_REG_0);
3783 if (bt_reg_mask(bt) & BPF_REGMASK_ARGS) {
3787 verbose(env, "BUG regs %x\n", bt_reg_mask(bt));
3804 bt_clear_reg(bt, i);
3805 if (bt_reg_mask(bt) & BPF_REGMASK_ARGS) {
3806 verbose(env, "BUG regs %x\n", bt_reg_mask(bt));
3821 bt_is_reg_set(bt, BPF_REG_0);
3823 bt_clear_reg(bt, BPF_REG_0);
3824 if (bt_subprog_enter(bt))
3828 bt_set_reg(bt, BPF_REG_0);
3834 if (!bt_is_reg_set(bt, dreg) && !bt_is_reg_set(bt, sreg))
3842 bt_set_reg(bt, dreg);
3843 bt_set_reg(bt, sreg);
3851 if (!bt_is_reg_set(bt, dreg))
3853 bt_clear_reg(bt, dreg);
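
The backtrack_insn() fragments at lines 3588-3853 implement the backwards data flow of precision marks: when the destination of an instruction is tracked, tracking is moved to whatever that destination was computed from. A condensed sketch of the three most common cases, with the jmp-history bookkeeping and error handling omitted and the is_* predicates used purely as placeholders for the real opcode checks:

	if (is_reg_to_reg_mov) {                        /* dreg = sreg */
		if (!bt_is_reg_set(bt, dreg))
			return 0;                       /* dst not tracked, nothing to do */
		bt_clear_reg(bt, dreg);                 /* dst got its value from src ... */
		bt_set_reg(bt, sreg);                   /* ... so precision moves to src */
	} else if (is_fill_from_stack) {                /* dreg = *(fp + off) */
		if (!bt_is_reg_set(bt, dreg))
			return 0;
		bt_clear_reg(bt, dreg);
		bt_set_frame_slot(bt, fr, spi);         /* now track the slot it was filled from */
	} else if (is_spill_to_stack) {                 /* *(fp + off) = sreg */
		if (!bt_is_frame_slot_set(bt, fr, spi))
			return 0;
		bt_clear_frame_slot(bt, fr, spi);
		bt_set_reg(bt, sreg);                   /* now track the register that was spilled */
	}

A constant move (line 3639) just clears the destination, while other ALU operations keep the destination tracked and additionally set the source register (line 3648).
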
4013 /* Collect a set of IDs for all registers currently marked as precise in env->bt.
4019 struct backtrack_state *bt = &env->bt;
4027 for (fr = bt->frame; fr >= 0; fr--) {
4030 bitmap_from_u64(mask, bt_frame_reg_mask(bt, fr));
4039 bitmap_from_u64(mask, bt_frame_stack_mask(bt, fr));
4062 bt_set_frame_reg(bt, fr, i);
4072 bt_set_frame_slot(bt, fr, i);
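
Lines 4013-4072 belong to the scalar-ID precision propagation that walks every frame recorded in env->bt. The iteration pattern implied by these fragments, using the kernel's bitmap helpers (the func/reg locals follow the usual verifier naming, and their handling is summarised in comments, not quoted):

	DECLARE_BITMAP(mask, 64);
	int fr, i;

	for (fr = bt->frame; fr >= 0; fr--) {
		func = st->frame[fr];

		bitmap_from_u64(mask, bt_frame_reg_mask(bt, fr));
		for_each_set_bit(i, mask, 32) {
			reg = &func->regs[i];
			/* collect reg->id; mark further registers with bt_set_frame_reg() */
		}

		bitmap_from_u64(mask, bt_frame_stack_mask(bt, fr));
		for_each_set_bit(i, mask, 64) {
			/* slot i corresponds to fp - (i + 1) * 8 in this frame */
		}
	}
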
4168 struct backtrack_state *bt = &env->bt;
4182 bt_init(bt, env->cur_state->curframe);
4188 func = st->frame[bt->frame];
4195 bt_set_reg(bt, regno);
4198 if (bt_empty(bt))
4208 bt->frame, last_idx, first_idx, subseq_idx);
4245 bt_stack_mask(bt) == 0 &&
4246 (bt_reg_mask(bt) & ~BPF_REGMASK_ARGS) == 0) {
4247 bitmap_from_u64(mask, bt_reg_mask(bt));
4250 bt_clear_reg(bt, i);
4258 st->frame[0]->subprogno, bt_reg_mask(bt), bt_stack_mask(bt));
4269 err = backtrack_insn(env, i, subseq_idx, hist, bt);
4273 bt_reset(bt);
4278 if (bt_empty(bt))
4304 for (fr = bt->frame; fr >= 0; fr--) {
4306 bitmap_from_u64(mask, bt_frame_reg_mask(bt, fr));
4310 bt_clear_frame_reg(bt, fr, i);
4314 bt_clear_frame_reg(bt, fr, i);
4319 bitmap_from_u64(mask, bt_frame_stack_mask(bt, fr));
4329 bt_clear_frame_slot(bt, fr, i);
4334 bt_clear_frame_slot(bt, fr, i);
4340 bt_frame_reg_mask(bt, fr));
4344 bt_frame_stack_mask(bt, fr));
4350 if (bt_empty(bt))
4362 if (!bt_empty(bt)) {
4364 bt_reset(bt);
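
Lines 4168-4364 are from __mark_chain_precision(), the driver that owns env->bt. Heavily condensed, and with the jump-history walking, first_idx/last_idx bookkeeping and per-frame cleanup left out, the control flow implied by these fragments looks roughly like this (mark_all_scalars_precise() is how verifier.c names the conservative fallback, though it does not appear in this listing):

	bt_init(bt, env->cur_state->curframe);          /* start tracking in the current frame */
	if (regno >= 0)
		bt_set_reg(bt, regno);                  /* the register that must become precise */
	if (bt_empty(bt))
		return 0;

	for (;;) {
		/* ... walk instructions backwards through the recorded history ... */
		err = backtrack_insn(env, i, subseq_idx, hist, bt);
		if (err == -ENOTSUPP) {
			/* unsupported pattern: conservatively mark everything precise and stop */
			mark_all_scalars_precise(env, env->cur_state);
			bt_reset(bt);
			return 0;
		} else if (err) {
			return err;
		}
		if (bt_empty(bt))
			return 0;                       /* all requested registers/slots resolved */
		/* ... otherwise continue into the parent verifier state ... */
	}

The trailing check at lines 4362-4364 handles marks that are still pending after the whole chain has been walked: the state is resolved conservatively and bt_reset() clears the tracker rather than leaving stale masks behind.
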
4375 /* mark_chain_precision_batch() assumes that env->bt is set in the caller to
17187 bt_set_frame_reg(&env->bt, fr, i);
17206 bt_set_frame_slot(&env->bt, fr, i);
21501 env->bt.env = env;