Lines Matching refs:part

9 // This file is a part of ThreadSanitizer (TSan), a race detector.
80 TracePart* part = nullptr;
87 part = ctx->trace_part_recycle.PopFront();
88 DPrintf("#%d: TracePartAlloc: part=%p\n", thr->tid, part);
89 if (part && part->trace) {
90 Trace* trace1 = part->trace;
92 part->trace = nullptr;
94 CHECK_EQ(part, part1);
107 if (!part)
112 if (!part)
113 part = new (MmapOrDie(sizeof(*part), "TracePart")) TracePart();
114 return part;
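The matched lines above (source lines 80-114) are the allocation path: TracePartAlloc first tries to pop a previously used part off the global recycle list, detaching it from the trace that last owned it, and only maps fresh memory (placement-new over MmapOrDie) when recycling yields nothing. Below is a minimal stand-alone sketch of that allocate-or-recycle pattern; RecycleList, the single "oldest" pointer, and plain POSIX mmap are simplified stand-ins, not the real TSan types and helpers.

#include <sys/mman.h>
#include <cassert>
#include <new>

struct Trace;  // forward declaration for the back-pointer

struct TracePart {
  static constexpr unsigned long kSize = 1024;  // events per part (placeholder)
  Trace* trace = nullptr;       // trace that currently owns this part
  TracePart* next = nullptr;    // intrusive link for the recycle list
  unsigned long events[kSize];  // event storage (contents unused in this sketch)
};

struct Trace {
  TracePart* oldest = nullptr;  // stand-in for the trace's part list
};

struct RecycleList {  // stand-in for ctx->trace_part_recycle
  TracePart* front = nullptr;
  void PushFront(TracePart* p) { p->next = front; front = p; }
  TracePart* PopFront() {
    TracePart* p = front;
    if (p) { front = p->next; p->next = nullptr; }
    return p;
  }
};

static RecycleList g_recycle;

TracePart* TracePartAllocSketch() {
  // Prefer recycling: a reused part may still be linked to the trace that
  // filled it, so detach it from that trace before handing it out.
  if (TracePart* part = g_recycle.PopFront()) {
    if (Trace* prev = part->trace) {
      assert(prev->oldest == part);  // the recycled part is that trace's oldest
      prev->oldest = nullptr;        // real code pops it from trace->parts
      part->trace = nullptr;
    }
    return part;
  }
  // Nothing to recycle: map fresh memory and construct the part in place,
  // mirroring the placement-new over MmapOrDie in the listing.
  void* mem = mmap(nullptr, sizeof(TracePart), PROT_READ | PROT_WRITE,
                   MAP_PRIVATE | MAP_ANONYMOUS, -1, 0);
  if (mem == MAP_FAILED)
    return nullptr;  // the real code aborts instead of returning null
  return new (mem) TracePart();
}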
117 static void TracePartFree(TracePart* part) SANITIZER_REQUIRES(ctx->slot_mtx) {
118 DCHECK(part->trace);
119 part->trace = nullptr;
120 ctx->trace_part_recycle.PushFront(part);
125 while (auto* part = ctx->trace_part_recycle.PopFront()) {
126 if (auto trace = part->trace)
127 CHECK_EQ(trace->parts.PopFront(), part);
128 UnmapOrDie(part, sizeof(*part));
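The free and teardown side (source lines 117-128): TracePartFree only detaches the part from its trace and queues it for reuse; the memory is reclaimed either by a later allocation or by the shutdown loop that unmaps every queued part. A small sketch under the same simplifying assumptions as above (hypothetical stand-in types, plain munmap instead of UnmapOrDie):

#include <sys/mman.h>
#include <cassert>

struct Trace;  // only referenced through a pointer here

struct TracePart {
  Trace* trace = nullptr;     // owning trace, cleared when the part is freed
  TracePart* next = nullptr;  // intrusive link for the recycle list
  char events[4096];          // placeholder for the event buffer
};

struct RecycleList {
  TracePart* front = nullptr;
  void PushFront(TracePart* p) { p->next = front; front = p; }
  TracePart* PopFront() {
    TracePart* p = front;
    if (p) { front = p->next; p->next = nullptr; }
    return p;
  }
};

static RecycleList g_recycle;

void TracePartFreeSketch(TracePart* part) {
  assert(part->trace);    // a part being freed should still name its owner
  part->trace = nullptr;  // detach it; the recycle list now owns the memory
  g_recycle.PushFront(part);
}

void DrainRecycleListSketch() {
  // Shutdown: unmap every queued part. In the real code a part that is still
  // linked to a trace is first popped from that trace's part list.
  while (TracePart* part = g_recycle.PopFront())
    munmap(part, sizeof(*part));
}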
155 auto part = parts->Front();
156 local = local || part == trace->local_head;
158 CHECK(!ctx->trace_part_recycle.Queued(part));
160 ctx->trace_part_recycle.Remove(part);
162 // The thread is running and this is the last/current part.
163 // Set the trace position to the end of the current part
165 // to a new slot and allocate a new trace part.
168 // within this part, because switching parts is protected by
172 reinterpret_cast<uptr>(&part->events[TracePart::kSize]));
175 parts->Remove(part);
176 TracePartFree(part);
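The reset path above (source lines 155-176) frees stale parts, but keeps a running thread's last/current part and instead stores a trace position equal to one-past-the-end of its event buffer, so the thread's next append fails the in-bounds check and takes the slow path that attaches a new part. A self-contained sketch of that trick with simplified, hypothetical types (the real fast-path check also involves the TracePart::kAlignment trick, omitted here):

#include <atomic>
#include <cstdint>
#include <cstdio>

using uptr = uintptr_t;

struct TracePart {
  static constexpr uptr kSize = 256;  // events per part (placeholder value)
  uint64_t events[kSize];
};

struct ThreadStateSketch {
  std::atomic<uptr> trace_pos{0};  // address where the next event is written
};

// Fast-path append: succeeds only while the position is inside the part.
bool TryAppendEvent(ThreadStateSketch* thr, TracePart* part, uint64_t ev) {
  uptr pos = thr->trace_pos.load(std::memory_order_relaxed);
  uptr end = reinterpret_cast<uptr>(&part->events[TracePart::kSize]);
  if (pos >= end)
    return false;  // out of space: the caller must switch to a new part
  *reinterpret_cast<uint64_t*>(pos) = ev;
  thr->trace_pos.store(pos + sizeof(uint64_t), std::memory_order_relaxed);
  return true;
}

// What the reset path does to a running thread's last/current part: store a
// one-past-the-end position so the very next append takes the slow path.
void ForcePartSwitch(ThreadStateSketch* thr, TracePart* part) {
  thr->trace_pos.store(reinterpret_cast<uptr>(&part->events[TracePart::kSize]),
                       std::memory_order_relaxed);
}

int main() {
  TracePart part;
  ThreadStateSketch thr;
  thr.trace_pos.store(reinterpret_cast<uptr>(&part.events[0]));
  std::printf("append ok: %d\n", TryAppendEvent(&thr, &part, 1));  // prints 1
  ForcePartSwitch(&thr, &part);
  std::printf("append ok: %d\n", TryAppendEvent(&thr, &part, 2));  // prints 0
}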
325 TracePart* part = nullptr;
334 part = parts->PopFront();
339 if (part) {
341 TracePartFree(part);
913 auto *part = trace->parts.Back();
915 trace, trace->parts.Front(), part, pos);
916 if (!part)
918 // We can get here when we still have space in the current trace part.
920 // the part. Check if we are indeed at the end of the current part or not,
922 Event* end = &part->events[TracePart::kSize];
923 DCHECK_GE(pos, &part->events[0]);
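Source lines 913-923 are the re-check in the part-switch slow path: the cheap fast-path test can fire before the part is really full, so the slow path computes end = &part->events[TracePart::kSize] and compares the current position against it, padding with no-op events when there is still room. A simplified, hypothetical illustration of that idea (placeholder constants and mask, not the real kSize/kAlignment values or the exact padding rules):

#include <cassert>
#include <cstdint>

using uptr = uintptr_t;
using Event = uint64_t;
constexpr Event kNopEvent = 0;

struct TracePart {
  static constexpr uptr kSize = 512;            // events per part (placeholder)
  static constexpr uptr kAlignmentMask = 0xff;  // placeholder low-bit mask
  Event events[kSize];
};

// Fast-path style test: cheap, but fires at every alignment boundary inside
// the part, not only at the real end (hence the "false positives").
bool LooksLikeEndOfPart(uptr pos) {
  return (pos & TracePart::kAlignmentMask) == 0;
}

// Slow-path re-check: true only if the position really reached the end.
// Otherwise pad the current slot with a no-op event and let the caller
// continue appending into the same part.
bool ReallyAtEndOfPart(TracePart* part, Event** posp) {
  Event* pos = *posp;
  Event* end = &part->events[TracePart::kSize];
  assert(pos >= &part->events[0] && pos <= end);
  if (pos < end) {
    *pos++ = kNopEvent;  // fill the false-positive slot
    *posp = pos;
    return false;
  }
  return true;  // genuinely full: the caller must allocate a new part
}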
946 TracePart* part = thr->tctx->trace.parts.Back();
947 if (part) {
949 reinterpret_cast<uptr>(&part->events[0]));
960 TracePart* part = TracePartAlloc(thr);
961 part->trace = trace;
970 trace->local_head = part;
975 trace->parts.PushBack(part);
977 reinterpret_cast<uptr>(&part->events[0]));
979 // Make this part self-sufficient by restoring the current stack
983 // Pathologically large stacks may not fit into the part.
986 // Check that kMaxFrames won't consume the whole part.
1004 // filled the trace part exactly up to the TracePart::kAlignment gap
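Source lines 960-1004 set up the freshly allocated part: link it to the trace, push it onto the part list, reset the position to &part->events[0], and then replay the thread's current call stack into it so the part can be decoded on its own, capping the replay at kMaxFrames so a pathologically deep stack cannot fill the whole part. A sketch of that stack-replay step with simplified, hypothetical types (a vector stands in for the fixed event buffer and for the real event-encoding helpers):

#include <cstddef>
#include <cstdint>
#include <vector>

using uptr = uintptr_t;

struct TracePart {
  static constexpr uptr kSize = 4096;  // events per part (placeholder)
  std::vector<uptr> events;            // the real part uses a fixed inline buffer
};

constexpr uptr kMaxFrames = 1000;
static_assert(kMaxFrames < TracePart::kSize / 2,
              "stack replay must not consume the whole part");

// Replay the top of the shadow stack into the fresh part so a decoder can
// reconstruct call stacks without consulting older (possibly recycled) parts.
void RestoreStackIntoPart(const std::vector<uptr>& shadow_stack,
                          TracePart* part) {
  std::size_t first =
      shadow_stack.size() > kMaxFrames ? shadow_stack.size() - kMaxFrames : 0;
  for (std::size_t i = first; i < shadow_stack.size(); i++)
    part->events.push_back(shadow_stack[i]);  // stand-in for a function-entry event
}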