Lines Matching defs:check_state

1951 	struct btree_check_state *check_state = info->state;
1952 	struct cache_set *c = check_state->c;
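
These fragments are from the multithreaded btree check in drivers/md/bcache/btree.c: bch_btree_check() (line 2055 onward) spawns worker kthreads that share one btree_check_state, and each worker receives a per-thread btree_check_info whose ->state points back at the shared struct (line 1951). Pieced together from the fields these matches touch, the two structures look roughly as follows; this is an orientation sketch, not a copy of the kernel header, and the infos[] bound is an assumption:

#include <linux/atomic.h>
#include <linux/spinlock.h>
#include <linux/wait.h>

struct cache_set;			/* defined elsewhere in bcache */
struct task_struct;
struct btree_check_state;

struct btree_check_info {
	struct btree_check_state *state;	/* back-pointer, line 1951 */
	struct task_struct *thread;		/* from kthread_run(), line 2091 */
	int result;				/* per-thread error code, line 2088 */
};

struct btree_check_state {
	struct cache_set *c;			/* cache set under check, line 2067 */
	int total_threads;			/* line 2068 */
	int key_idx;				/* next root key to hand out, line 2069 */
	spinlock_t idx_lock;			/* protects key_idx, line 2070 */
	atomic_t started;			/* live workers, line 2071 */
	atomic_t enough;			/* no more keys to distribute, line 2072 */
	wait_queue_head_t wait;			/* coordinator sleeps here, line 2073 */
	struct btree_check_info infos[16];	/* bound assumed for the sketch */
};
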
1973 		spin_lock(&check_state->idx_lock);
1974 		cur_idx = check_state->key_idx;
1975 		check_state->key_idx++;
1976 		spin_unlock(&check_state->idx_lock);
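
Lines 1973-1976 are the work-distribution step: a worker claims the next root-node key index under idx_lock, so each top-level subtree gets checked by exactly one thread. Below is a minimal userspace analogue using pthreads; struct check_state and all names here are invented for illustration (the later sketches reuse them), and where the kernel iterates keys until they run out, this sketch assumes a known nr_keys:

#include <pthread.h>
#include <stdatomic.h>

/* Userspace stand-in for btree_check_state; illustrative only. */
struct check_state {
	pthread_mutex_t idx_lock;	/* protects key_idx, like ->idx_lock */
	int key_idx;			/* next unclaimed key index */
	int nr_keys;			/* total keys (assumed known here) */

	pthread_mutex_t lock;		/* protects started */
	pthread_cond_t wait;		/* stand-in for the wait queue */
	int started;			/* live workers, like ->started */

	atomic_bool enough;		/* stand-in for ->enough */
	int total_threads;
};

/* Lines 1973-1976: hand out the next key index, -1 when none remain. */
static int claim_next_key(struct check_state *st)
{
	int cur;

	pthread_mutex_lock(&st->idx_lock);
	cur = st->key_idx++;
	pthread_mutex_unlock(&st->idx_lock);

	return cur < st->nr_keys ? cur : -1;
}
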
1992 				atomic_set(&check_state->enough, 1);
1993 				/* Update check_state->enough earlier */
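
Line 1992 raises enough when a worker finds no further root keys; the comment on line 1993 points at the memory barrier that follows it in the source, which pairs with the read on line 2085 so the spawning loop stops creating threads that would have nothing to do. A C11-atomics sketch of that publish/observe pairing, reusing the check_state analogue above:

#include <stdatomic.h>
#include <stdbool.h>

/* Worker side (line 1992): publish "no more keys" with release
 * ordering, standing in for atomic_set() plus the barrier the
 * line-1993 comment refers to. */
static void mark_enough(struct check_state *st)
{
	atomic_store_explicit(&st->enough, true, memory_order_release);
}

/* Spawner side (line 2085): observe the flag with acquire ordering
 * before deciding to create another worker. */
static bool enough_workers(struct check_state *st)
{
	return atomic_load_explicit(&st->enough, memory_order_acquire);
}
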
2027 	/* update check_state->started among all CPUs */
2029 	if (atomic_dec_and_test(&check_state->started))
2030 		wake_up(&check_state->wait);
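
Lines 2029-2030 are the worker half of the shutdown handshake: every worker decrements started on its way out, and the one that drops it to zero wakes the coordinator sleeping on line 2108. The analogue below uses a mutex-protected counter and a condition variable where the kernel combines atomic_dec_and_test() with a wait queue:

/* Worker epilogue (lines 2029-2030): last one out signals the waiter.
 * Reuses struct check_state from the sketch above. */
static void worker_done(struct check_state *st)
{
	pthread_mutex_lock(&st->lock);
	if (--st->started == 0)
		pthread_cond_signal(&st->wait);
	pthread_mutex_unlock(&st->lock);
}

/* Coordinator side (line 2108): sleep until every worker has retired. */
static void wait_all_done(struct check_state *st)
{
	pthread_mutex_lock(&st->lock);
	while (st->started != 0)
		pthread_cond_wait(&st->wait, &st->lock);
	pthread_mutex_unlock(&st->lock);
}

Re-testing the predicate under the same mutex in both helpers is what makes the wakeup impossible to lose, mirroring how wait_event() re-evaluates its condition after each wakeup.
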
2055 	struct btree_check_state check_state;
2066 	memset(&check_state, 0, sizeof(struct btree_check_state));
2067 	check_state.c = c;
2068 	check_state.total_threads = bch_btree_chkthread_nr();
2069 	check_state.key_idx = 0;
2070 	spin_lock_init(&check_state.idx_lock);
2071 	atomic_set(&check_state.started, 0);
2072 	atomic_set(&check_state.enough, 0);
2073 	init_waitqueue_head(&check_state.wait);
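
Lines 2066-2073 zero the coordinator and set up each primitive. Worth noting: check_state lives on the stack of bch_btree_check() (line 2055), and handing its address to worker threads is only safe because the wait on line 2108 keeps that frame alive until the last worker exits. A userspace analogue of the setup; unlike the kernel initializers, pthread object initialization can fail, so the sketch checks for it:

#include <stdbool.h>
#include <string.h>

/* Analogue of lines 2066-2073; names reused from the sketches above. */
static int check_state_init(struct check_state *st, int nr_threads, int nr_keys)
{
	memset(st, 0, sizeof(*st));		/* line 2066 */
	st->total_threads = nr_threads;		/* line 2068 */
	st->nr_keys = nr_keys;
	st->key_idx = 0;			/* line 2069 */
	atomic_init(&st->enough, false);	/* line 2072 */

	if (pthread_mutex_init(&st->idx_lock, NULL) ||
	    pthread_mutex_init(&st->lock, NULL) ||
	    pthread_cond_init(&st->wait, NULL))
		return -1;
	return 0;
}
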
2078 	 * if check_state.enough is non-zero, it means current
2082 	for (i = 0; i < check_state.total_threads; i++) {
2083 		/* fetch latest check_state.enough earlier */
2085 		if (atomic_read(&check_state.enough))
2088 		check_state.infos[i].result = 0;
2089 		check_state.infos[i].state = &check_state;
2091 		check_state.infos[i].thread =
2093 			    &check_state.infos[i],
2095 		if (IS_ERR(check_state.infos[i].thread)) {
2098 				kthread_stop(check_state.infos[i].thread);
2102 		atomic_inc(&check_state.started);
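
Lines 2082-2102 spawn up to total_threads workers, break out early once enough is observed (line 2085), and on a kthread_run() failure stop the already-started threads before bailing out (line 2098). The sketch below mirrors that shape with pthread_create(), using pthread_join() where the kernel uses kthread_stop(); check_worker() is a hypothetical worker body built from the earlier helpers:

#include <pthread.h>

/* Stand-in for btree_check_info; illustrative only. */
struct check_info {
	pthread_t thread;
	struct check_state *state;	/* back-pointer, as on line 1951 */
	int result;			/* a real worker would record errors here */
};

/* Hypothetical worker: drain key indexes, then retire. */
static void *check_worker(void *arg)
{
	struct check_info *info = arg;
	int idx;

	while ((idx = claim_next_key(info->state)) >= 0)
		;	/* a real worker would check the subtree at idx */

	mark_enough(info->state);	/* line 1992 */
	worker_done(info->state);	/* lines 2029-2030 */
	return NULL;
}

/* Lines 2082-2102: spawn loop with unwind on failure. */
static int spawn_workers(struct check_state *st, struct check_info *infos)
{
	int i;

	for (i = 0; i < st->total_threads; i++) {
		if (enough_workers(st))		/* line 2085 */
			break;

		infos[i].result = 0;		/* line 2088 */
		infos[i].state = st;		/* line 2089 */
		if (pthread_create(&infos[i].thread, NULL,
				   check_worker, &infos[i])) {
			while (--i >= 0)	/* line 2098's unwind */
				pthread_join(infos[i].thread, NULL);
			return -1;
		}

		pthread_mutex_lock(&st->lock);	/* line 2102 */
		st->started++;
		pthread_mutex_unlock(&st->lock);
	}
	return 0;
}
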
2108 	wait_event(check_state.wait, atomic_read(&check_state.started) == 0);
2110 	for (i = 0; i < check_state.total_threads; i++) {
2111 		if (check_state.infos[i].result) {
2112 			ret = check_state.infos[i].result;
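
Lines 2108-2112 are the coordinator's tail: sleep until the last worker drops started to zero, then scan the per-thread results and return the first non-zero one. Tying the sketches together into a usage example (MAX_THREADS is an assumed bound standing in for the infos[] array size):

#define MAX_THREADS 16		/* assumed bound for the sketch */

/* Lines 2108-2112: wait for the workers, then harvest their results. */
static int run_check(struct check_state *st)
{
	/* Zero-initialized so slots never spawned read as "no error",
	 * matching the kernel's memset on line 2066. */
	struct check_info infos[MAX_THREADS] = { 0 };
	int i, ret;

	if (st->total_threads > MAX_THREADS)
		st->total_threads = MAX_THREADS;

	ret = spawn_workers(st, infos);

	wait_all_done(st);	/* line 2108 */
	if (ret)
		return ret;	/* some workers failed to start */

	for (i = 0; i < st->total_threads; i++) {
		if (infos[i].result) {		/* line 2111 */
			ret = infos[i].result;	/* first failure wins */
			break;
		}
	}
	return ret;
}

As in the kernel loop, the first non-zero result wins and later slots are ignored.
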