Lines matching refs:thr — ThreadSanitizer runtime (compiler-rt/lib/tsan/rtl/tsan_rtl.cpp)

79 static TracePart* TracePartAlloc(ThreadState* thr) {
84 Trace* trace = &thr->tctx->trace;
88 DPrintf("#%d: TracePartAlloc: part=%p\n", thr->tid, part);
151 bool attached = tctx->thr && tctx->thr->slot;
171 &tctx->thr->trace_pos,
180 if (tctx->thr && !tctx->thr->slot) {
181 atomic_store_relaxed(&tctx->thr->trace_pos, 0);
182 tctx->thr->trace_prev_pc = 0;
195 slot.thr = nullptr;
232 void DoReset(ThreadState* thr, uptr epoch) SANITIZER_NO_THREAD_SAFETY_ANALYSIS {
244 DPrintf("#%d: DoReset epoch=%lu\n", thr ? thr->tid : -1, epoch);
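
DoReset (line 232) begins a new global epoch: slots are vacated (line 195) and detached threads' trace positions are cleared (lines 180-182). Threads that attach later compare their cached slot_epoch against ctx->global_epoch (line 307) and reset stale state only then. A sketch of that compare-the-cached-epoch pattern, with all names invented:

    #include <atomic>
    #include <cstdint>

    // Invented sketch: threads compare a cached epoch against the global
    // one and clear their local tracing state the next time they run.
    std::atomic<uint64_t> global_epoch{1};

    struct ThreadLocal {
      uint64_t seen_epoch = 0;
      uint64_t trace_pos = 0;
      uint64_t trace_prev_pc = 0;

      void MaybeReset() {
        uint64_t g = global_epoch.load(std::memory_order_relaxed);
        if (seen_epoch != g) {  // a DoReset-style event happened
          seen_epoch = g;
          trace_pos = 0;        // restart tracing from a clean state
          trace_prev_pc = 0;
        }
      }
    };
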
251 static TidSlot* FindSlotAndLock(ThreadState* thr)
252 SANITIZER_ACQUIRE(thr->slot->mtx) SANITIZER_NO_THREAD_SAFETY_ANALYSIS {
253 CHECK(!thr->slot);
264 thr->slot_locked = false;
278 DoReset(thr, epoch);
282 CHECK(!thr->slot_locked);
283 thr->slot_locked = true;
284 if (slot->thr) {
285 DPrintf("#%d: preempting sid=%d tid=%d\n", thr->tid, (u32)slot->sid,
286 slot->thr->tid);
287 slot->SetEpoch(slot->thr->fast_state.epoch());
288 slot->thr = nullptr;
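
FindSlotAndLock (line 251) picks a slot for the calling thread, preempting the slot's current owner if every slot is taken (lines 284-288); the preempted thread notices on its next SlotLock and reattaches elsewhere. A standalone sketch of LRU slot handout with preemption; Slot, Thread, and slot_queue are invented names:

    #include <cassert>
    #include <list>
    #include <mutex>

    // Invented sketch: a fixed set of slots handed out LRU-first; if
    // every slot has an owner, the least recently used owner is preempted.
    struct Thread;

    struct Slot {
      std::mutex mtx;
      Thread* owner = nullptr;
    };

    struct Thread {
      Slot* slot = nullptr;
    };

    std::list<Slot*> slot_queue;  // front = least recently used

    Slot* FindSlotAndLock(Thread* self) {
      assert(self->slot == nullptr);
      Slot* s = slot_queue.front();  // take the LRU slot
      slot_queue.pop_front();
      slot_queue.push_back(s);       // it is now the most recently used
      s->mtx.lock();
      if (s->owner) {                // preempt the current owner; it will
        s->owner->slot = nullptr;    // notice and reattach on its next lock
        s->owner = nullptr;
      }
      return s;                      // returned with s->mtx held
    }
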
295 void SlotAttachAndLock(ThreadState* thr) {
296 TidSlot* slot = FindSlotAndLock(thr);
297 DPrintf("#%d: SlotAttach: slot=%u\n", thr->tid, static_cast<int>(slot->sid));
298 CHECK(!slot->thr);
299 CHECK(!thr->slot);
300 slot->thr = thr;
301 thr->slot = slot;
305 thr->fast_state.SetSid(slot->sid);
306 thr->fast_state.SetEpoch(epoch);
307 if (thr->slot_epoch != ctx->global_epoch) {
308 thr->slot_epoch = ctx->global_epoch;
309 thr->clock.Reset();
311 thr->last_sleep_stack_id = kInvalidStackID;
312 thr->last_sleep_clock.Reset();
315 thr->clock.Set(slot->sid, epoch);
316 slot->journal.PushBack({thr->tid, epoch});
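
SlotAttachAndLock (line 295) makes the adoption concrete: the thread takes over the slot's sid and the next epoch, resets its vector clock if a global reset happened since it last held a slot (lines 307-309), and appends a (tid, epoch) record to the slot's journal so reports can map shadow values back to threads. A sketch under invented names:

    #include <cstdint>
    #include <vector>

    // Invented sketch of slot attach: adopt the slot's sid, take the next
    // epoch, reset the clock after a global reset, record the adoption.
    struct JournalEntry { uint32_t tid; uint16_t epoch; };

    struct Slot {
      uint16_t sid = 0;
      uint16_t epoch = 0;
      std::vector<JournalEntry> journal;  // maps (sid, epoch) back to tids
    };

    struct Thread {
      uint32_t tid = 0;
      uint16_t sid = 0, epoch = 0;
      uint64_t slot_epoch = 0;  // global epoch this thread last saw
      void ResetClock() {}      // stand-in for the vector clock reset
    };

    void Attach(Thread* t, Slot* s, uint64_t global_epoch) {
      t->sid = s->sid;           // shadow words now carry this sid
      t->epoch = ++s->epoch;     // fresh epoch for the new owner
      if (t->slot_epoch != global_epoch) {
        t->slot_epoch = global_epoch;  // a DoReset ran in between, so the
        t->ResetClock();               // old clock contents are stale
      }
      s->journal.push_back({t->tid, t->epoch});
    }
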
319 static void SlotDetachImpl(ThreadState* thr, bool exiting) {
320 TidSlot* slot = thr->slot;
321 thr->slot = nullptr;
322 if (thr != slot->thr) {
324 if (thr->slot_epoch != ctx->global_epoch) {
326 auto* trace = &thr->tctx->trace;
335 thr->tctx->trace.local_head = nullptr;
336 atomic_store_relaxed(&thr->trace_pos, 0);
337 thr->trace_prev_pc = 0;
346 CHECK(exiting || thr->fast_state.epoch() == kEpochLast);
347 slot->SetEpoch(thr->fast_state.epoch());
348 slot->thr = nullptr;
351 void SlotDetach(ThreadState* thr) {
352 Lock lock(&thr->slot->mtx);
353 SlotDetachImpl(thr, true);
356 void SlotLock(ThreadState* thr) SANITIZER_NO_THREAD_SAFETY_ANALYSIS {
357 DCHECK(!thr->slot_locked);
365 TidSlot* slot = thr->slot;
367 thr->slot_locked = true;
368 if (LIKELY(thr == slot->thr && thr->fast_state.epoch() != kEpochLast))
370 SlotDetachImpl(thr, false);
371 thr->slot_locked = false;
373 SlotAttachAndLock(thr);
376 void SlotUnlock(ThreadState* thr) {
377 DCHECK(thr->slot_locked);
378 thr->slot_locked = false;
379 thr->slot->mtx.Unlock();
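
SlotLock (line 356) is the common entry point: the fast path keeps the current slot if the thread still owns it and has epochs left (line 368); otherwise it detaches and reattaches, possibly to a different slot. A sketch of the two paths; kEpochLast's value and the Reattach helper are invented:

    #include <cstdint>
    #include <mutex>

    // Invented sketch of the lock fast/slow split.
    constexpr uint16_t kEpochLast = 0xffff;

    struct Thread;
    struct Slot { std::mutex mtx; Thread* owner = nullptr; };
    struct Thread { Slot* slot = nullptr; uint16_t epoch = 0; };

    void Reattach(Thread* t);  // slow path, defined below

    void SlotLock(Thread* t) {
      Slot* s = t->slot;
      s->mtx.lock();
      // Fast path: still the owner, and epochs left to hand out.
      if (s->owner == t && t->epoch != kEpochLast)
        return;               // caller proceeds with s->mtx held
      // Slow path: preempted or out of epochs; give the slot up.
      s->owner = nullptr;
      s->mtx.unlock();
      Reattach(t);            // ends with the (new) slot's mtx held
    }

    // Stand-in for SlotDetachImpl + FindSlotAndLock + attach.
    void Reattach(Thread* t) {
      t->slot->mtx.lock();    // the real code may pick a different slot
      t->slot->owner = t;
      t->epoch = 0;           // a fresh epoch range on reattach
    }
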
568 void UnmapShadow(ThreadState *thr, uptr addr, uptr size) {
573 SlotLocker locker(thr, true);
574 ctx->metamap.ResetRange(thr->proc(), addr, size, true);
669 ThreadState* thr = cur_thread();
670 thr->nomalloc = false;
671 thr->ignore_sync++;
672 thr->ignore_reads_and_writes++;
673 atomic_store_relaxed(&thr->in_signal_handler, 0);
680 void Initialize(ThreadState *thr) {
708 ProcWire(proc, thr);
731 ThreadStart(thr, tid, GetTid(), ThreadType::Regular);
767 int Finalize(ThreadState *thr) {
775 if (flags()->atexit_sleep_ms > 0 && ThreadCount(thr) > 1)
787 ThreadFinalize(thr);
807 void ForkBefore(ThreadState* thr, uptr pc) SANITIZER_NO_THREAD_SAFETY_ANALYSIS {
811 SlotDetach(thr);
822 thr->suppress_reports++;
827 thr->ignore_interceptors++;
829 thr->ignore_reads_and_writes++;
834 static void ForkAfter(ThreadState* thr) SANITIZER_NO_THREAD_SAFETY_ANALYSIS {
835 thr->suppress_reports--; // Enabled in ForkBefore.
836 thr->ignore_interceptors--;
837 thr->ignore_reads_and_writes--;
843 SlotAttachAndLock(thr);
844 SlotUnlock(thr);
848 void ForkParentAfter(ThreadState* thr, uptr pc) { ForkAfter(thr); }
850 void ForkChildAfter(ThreadState* thr, uptr pc, bool start_thread) {
851 ForkAfter(thr);
852 u32 nthread = ctx->thread_registry.OnFork(thr->tid);
865 thr->ignore_interceptors++;
866 thr->suppress_reports++;
867 ThreadIgnoreBegin(thr, pc);
868 ThreadIgnoreSyncBegin(thr, pc);
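
Around fork(), the runtime quiesces itself: ForkBefore (line 807) detaches the slot and bumps suppression counters so neither side inherits half-locked runtime state, ForkAfter (line 834) undoes the counters and reattaches, and ForkChildAfter (line 850) additionally enables long-lived ignores in the single-threaded child (lines 865-868). A sketch of the same shape using pthread_atfork; the counters are invented and the real runtime hooks fork via interceptors instead:

    #include <pthread.h>
    #include <unistd.h>

    // Invented sketch of the fork protocol via pthread_atfork.
    static thread_local int suppress_reports, ignore_interceptors;

    static void BeforeFork() {
      // Quiesce: silence reporting and interception so neither side of
      // the fork inherits half-locked runtime state.
      suppress_reports++;
      ignore_interceptors++;
    }

    static void AfterFork() {  // runs in both parent and child
      suppress_reports--;
      ignore_interceptors--;
      // The real runtime also reattaches to a slot here.
    }

    int main() {
      pthread_atfork(BeforeFork, AfterFork, AfterFork);
      if (fork() == 0) _exit(0);  // child exits right away in this demo
      return 0;
    }
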
875 void GrowShadowStack(ThreadState *thr) {
876 const int sz = thr->shadow_stack_end - thr->shadow_stack;
879 internal_memcpy(newstack, thr->shadow_stack, sz * sizeof(uptr));
880 Free(thr->shadow_stack);
881 thr->shadow_stack = newstack;
882 thr->shadow_stack_pos = newstack + sz;
883 thr->shadow_stack_end = newstack + newsz;
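
GrowShadowStack (line 875) is plain doubling growth over three raw pointers; it is only called when the stack is full, so pos lands exactly at the old capacity. The same scheme as a standalone sketch (malloc/free stand in for the runtime's internal allocator):

    #include <cstddef>
    #include <cstdint>
    #include <cstdlib>
    #include <cstring>

    struct ShadowStack {
      uintptr_t *base, *pos, *end;
    };

    void Grow(ShadowStack* ss) {
      const size_t sz = ss->end - ss->base;  // current capacity
      const size_t newsz = 2 * sz;           // double it
      auto* fresh =
          static_cast<uintptr_t*>(std::malloc(newsz * sizeof(uintptr_t)));
      std::memcpy(fresh, ss->base, sz * sizeof(uintptr_t));
      std::free(ss->base);
      ss->base = fresh;
      ss->pos = fresh + sz;   // Grow runs only when full, so pos == old end
      ss->end = fresh + newsz;
    }
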
887 StackID CurrentStackId(ThreadState *thr, uptr pc) {
889 if (!thr->is_inited) // May happen during bootstrap.
894 DCHECK_LT(thr->shadow_stack_pos, thr->shadow_stack_end);
896 if (thr->shadow_stack_pos == thr->shadow_stack_end)
897 GrowShadowStack(thr);
899 thr->shadow_stack_pos[0] = pc;
900 thr->shadow_stack_pos++;
903 StackTrace(thr->shadow_stack, thr->shadow_stack_pos - thr->shadow_stack));
905 thr->shadow_stack_pos--;
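
CurrentStackId (line 887) temporarily pushes pc onto the shadow stack, interns the whole stack to obtain a StackID, then pops it again (lines 899-905). A sketch of that push-intern-pop shape; HashStack is a made-up stand-in for the stack depot:

    #include <cstdint>
    #include <vector>

    using StackID = uint32_t;

    StackID HashStack(const std::vector<uintptr_t>& frames) {
      uint64_t h = 1469598103934665603ull;  // FNV-1a over the frames
      for (uintptr_t f : frames) { h ^= f; h *= 1099511628211ull; }
      return static_cast<StackID>(h);
    }

    StackID CurrentStackId(std::vector<uintptr_t>& shadow_stack,
                           uintptr_t pc) {
      if (pc) shadow_stack.push_back(pc);  // temporarily include pc
      StackID id = HashStack(shadow_stack);
      if (pc) shadow_stack.pop_back();     // restore the stack
      return id;
    }
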
909 static bool TraceSkipGap(ThreadState* thr) {
910 Trace *trace = &thr->tctx->trace;
911 Event *pos = reinterpret_cast<Event *>(atomic_load_relaxed(&thr->trace_pos));
914 DPrintf("#%d: TraceSwitchPart enter trace=%p parts=%p-%p pos=%p\n", thr->tid,
931 atomic_store_relaxed(&thr->trace_pos, reinterpret_cast<uptr>(pos));
940 void TraceSwitchPart(ThreadState* thr) {
941 if (TraceSkipGap(thr))
946 TracePart* part = thr->tctx->trace.parts.Back();
948 atomic_store_relaxed(&thr->trace_pos,
954 TraceSwitchPartImpl(thr);
957 void TraceSwitchPartImpl(ThreadState* thr) {
958 SlotLocker locker(thr, true);
959 Trace* trace = &thr->tctx->trace;
960 TracePart* part = TracePartAlloc(thr);
962 thr->trace_prev_pc = 0;
976 atomic_store_relaxed(&thr->trace_pos,
981 TraceTime(thr);
988 uptr* pos = Max(&thr->shadow_stack[0], thr->shadow_stack_pos - kMaxFrames);
989 for (; pos < thr->shadow_stack_pos; pos++) {
990 if (TryTraceFunc(thr, *pos))
992 CHECK(TraceSkipGap(thr));
993 CHECK(TryTraceFunc(thr, *pos));
996 for (uptr i = 0; i < thr->mset.Size(); i++) {
997 MutexSet::Desc d = thr->mset.Get(i);
999 TraceMutexLock(thr, d.write ? EventType::kLock : EventType::kRLock, 0,
1007 if (!TraceAcquire(thr, &ev)) {
1008 CHECK(TraceSkipGap(thr));
1009 CHECK(TraceAcquire(thr, &ev));
1021 if (ctx->slot_queue.Queued(thr->slot)) {
1022 ctx->slot_queue.Remove(thr->slot);
1023 ctx->slot_queue.PushBack(thr->slot);
1028 DPrintf("#%d: TraceSwitchPart exit parts=%p-%p pos=0x%zx\n", thr->tid,
1030 atomic_load_relaxed(&thr->trace_pos));
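
TraceSwitchPartImpl (line 957) opens a fresh part and replays enough context for it to be decoded in isolation: a timestamp (line 981), the top kMaxFrames of the shadow stack as function-entry events (lines 988-993), and every held mutex as lock events (lines 996-999); if even the replay overflows, TraceSkipGap retries on the next part. A standalone sketch of that start-a-part-with-context idea, with an invented event encoding:

    #include <cstddef>
    #include <cstdint>
    #include <vector>

    // Invented sketch: each new trace part begins with enough replayed
    // context (recent frames, held mutexes) to be decoded on its own.
    struct Event { uint8_t kind; uintptr_t payload; };
    enum : uint8_t { kTime, kFuncEnter, kLock };

    struct TraceCtx {
      std::vector<uintptr_t> call_stack;    // current shadow stack
      std::vector<uintptr_t> held_mutexes;  // addresses of held mutexes
    };

    void StartPart(std::vector<Event>& part, const TraceCtx& ctx,
                   size_t max_frames) {
      part.clear();
      part.push_back({kTime, 0});  // timestamp first
      // Replay the most recent frames so unwinds in this part resolve.
      size_t first = ctx.call_stack.size() > max_frames
                         ? ctx.call_stack.size() - max_frames
                         : 0;
      for (size_t i = first; i < ctx.call_stack.size(); i++)
        part.push_back({kFuncEnter, ctx.call_stack[i]});
      // Replay held mutexes so lock sets are right from the first event.
      for (uintptr_t m : ctx.held_mutexes)
        part.push_back({kLock, m});
    }
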
1033 void ThreadIgnoreBegin(ThreadState* thr, uptr pc) {
1034 DPrintf("#%d: ThreadIgnoreBegin\n", thr->tid);
1035 thr->ignore_reads_and_writes++;
1036 CHECK_GT(thr->ignore_reads_and_writes, 0);
1037 thr->fast_state.SetIgnoreBit();
1040 thr->mop_ignore_set.Add(CurrentStackId(thr, pc));
1044 void ThreadIgnoreEnd(ThreadState *thr) {
1045 DPrintf("#%d: ThreadIgnoreEnd\n", thr->tid);
1046 CHECK_GT(thr->ignore_reads_and_writes, 0);
1047 thr->ignore_reads_and_writes--;
1048 if (thr->ignore_reads_and_writes == 0) {
1049 thr->fast_state.ClearIgnoreBit();
1051 thr->mop_ignore_set.Reset();
1059 ThreadState *thr = cur_thread();
1060 return thr->shadow_stack_pos - thr->shadow_stack;
1064 void ThreadIgnoreSyncBegin(ThreadState *thr, uptr pc) {
1065 DPrintf("#%d: ThreadIgnoreSyncBegin\n", thr->tid);
1066 thr->ignore_sync++;
1067 CHECK_GT(thr->ignore_sync, 0);
1070 thr->sync_ignore_set.Add(CurrentStackId(thr, pc));
1074 void ThreadIgnoreSyncEnd(ThreadState *thr) {
1075 DPrintf("#%d: ThreadIgnoreSyncEnd\n", thr->tid);
1076 CHECK_GT(thr->ignore_sync, 0);
1077 thr->ignore_sync--;
1079 if (thr->ignore_sync == 0)
1080 thr->sync_ignore_set.Reset();
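
Both ignore facilities (lines 1033-1080) share one pattern: a nesting counter guarded by CHECK_GT, and, for memory accesses, a bit mirrored into fast_state so the hot path tests a single bit instead of a counter. A sketch of that counter-plus-cached-bit pattern under invented names:

    #include <cassert>
    #include <cstdint>

    struct FastState {
      static constexpr uint64_t kIgnoreBit = 1ull << 63;
      uint64_t raw = 0;
      void SetIgnoreBit()   { raw |= kIgnoreBit; }
      void ClearIgnoreBit() { raw &= ~kIgnoreBit; }
    };

    struct Thread {
      int ignore_depth = 0;  // nests: Begin/End pairs may stack
      FastState fast_state;  // checked on every access (the hot path)
    };

    void IgnoreBegin(Thread* t) {
      t->ignore_depth++;
      t->fast_state.SetIgnoreBit();  // hot path tests one bit, no counter
    }

    void IgnoreEnd(Thread* t) {
      assert(t->ignore_depth > 0);   // Begin/End must pair up
      if (--t->ignore_depth == 0)
        t->fast_state.ClearIgnoreBit();
    }
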