Searched refs:gCPU (Results 1 - 25 of 34) sorted by relevance

/haiku/src/tests/add-ons/kernel/kernelland_emu/
smp.cpp 19 cpu_ent gCPU[8]; (variable)
/haiku/src/add-ons/kernel/cpu/x86/
amd.cpp 26 if (gCPU[0].arch.vendor != VENDOR_AMD)
31 if (gCPU[0].arch.family < 5
32 || (gCPU[0].arch.family == 5 && gCPU[0].arch.model < 9))
intel.cpp 26 if (gCPU[0].arch.vendor != VENDOR_INTEL)
via.cpp 44 if (gCPU[0].arch.vendor != VENDOR_CENTAUR)
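
These x86 cpu add-ons gate themselves on the boot CPU's identification before touching anything vendor-specific. A minimal sketch of that pattern, assuming a hypothetical module init hook and reusing the vendor/family constants visible in the hits above:

    #include <KernelExport.h>
    #include <cpu.h>    // private kernel header declaring gCPU (see the cpu.h hit below)

    static status_t
    vendor_module_init(void)    // hypothetical name, not the actual hook
    {
        // Wrong vendor: decline to install any fixups.
        if (gCPU[0].arch.vendor != VENDOR_AMD)
            return B_ERROR;

        // Too old a family/model for the workaround this module applies.
        if (gCPU[0].arch.family < 5
            || (gCPU[0].arch.family == 5 && gCPU[0].arch.model < 9))
            return B_ERROR;

        return B_OK;
    }
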
/haiku/src/system/kernel/
cpu.cpp 30 cpu_ent gCPU[SMP_MAX_CPUS]; (variable)
150 memset(&gCPU[curr_cpu], 0, sizeof(gCPU[curr_cpu]));
151 gCPU[curr_cpu].cpu_num = curr_cpu;
153 list_init(&gCPU[curr_cpu].irqs);
154 B_INITIALIZE_SPINLOCK(&gCPU[curr_cpu].irqs_lock);
170 count = acquire_read_seqlock(&gCPU[cpu].active_time_lock);
171 activeTime = gCPU[cpu].active_time;
172 } while (!release_read_seqlock(&gCPU[cpu].active_time_lock, count));
264 maxID[j] = max_c(maxID[j], gCPU[
[all...]
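
The initialization lines above (150-154) zero a CPU's gCPU slot and set up its per-CPU IRQ list and spinlock; the loop at 170-172 is a seqlock read. A minimal sketch of that read pattern, with a hypothetical helper name and the sequence-count type assumed to be uint32:

    static bigtime_t
    get_cpu_active_time(int32 cpu)
    {
        bigtime_t activeTime;
        uint32 count;

        // Retry until the sequence is stable, so active_time is read
        // consistently without blocking the writer.
        do {
            count = acquire_read_seqlock(&gCPU[cpu].active_time_lock);
            activeTime = gCPU[cpu].active_time;
        } while (!release_read_seqlock(&gCPU[cpu].active_time_lock, count));

        return activeTime;
    }
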
int.cpp 423 } while (gCPU[node->id].disabled);
473 cpu_ent* cpu = &gCPU[cpuID];
583 cpu = &gCPU[oldCPU];
738 cpu_ent* cpu = &gCPU[oldCPU];
747 cpu = &gCPU[newCPU];
system_info.cpp 61 gCPU[i].active_time, gCPU[i].interrupt_time, gCPU[i].irq_time);
537 localInfo[localIdx].enabled = !gCPU[cpuIdx].disabled;
smp.cpp 839 atomic_add(&gCPU[currentCPU].ici_counter, 1);
1153 atomic_add(&gCPU[i].ici_counter, 1);
1219 atomic_add(&gCPU[currentCPU].ici_counter, 1);
1285 atomic_add(&gCPU[currentCPU].ici_counter, 1);
/haiku/src/system/kernel/arch/x86/
arch_system_info.cpp 154 bigtime_t timestamp = gCPU[cpu].arch.perf_timestamp;
157 *frequency = gCPU[cpu].arch.frequency;
161 uint64 mperf = gCPU[cpu].arch.mperf_prev;
162 uint64 aperf = gCPU[cpu].arch.aperf_prev;
170 gCPU[cpu].arch.mperf_prev = mperf2;
171 gCPU[cpu].arch.aperf_prev = aperf2;
172 gCPU[cpu].arch.perf_timestamp = timestamp2;
173 gCPU[cpu].arch.frequency = *frequency;
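
The hits above cache APERF/MPERF samples and a timestamp in gCPU[cpu].arch so the next query can work from the deltas. A sketch of the usual APERF/MPERF estimate (not necessarily Haiku's exact formula); baseFrequency and the freshly sampled counter values are assumed inputs:

    static void
    estimate_frequency(int32 cpu, uint64 aperf2, uint64 mperf2,
        bigtime_t timestamp2, uint64 baseFrequency, uint64* frequency)
    {
        // Fall back to the cached value, as the read at line 157 suggests.
        *frequency = gCPU[cpu].arch.frequency;

        uint64 mperf = gCPU[cpu].arch.mperf_prev;
        uint64 aperf = gCPU[cpu].arch.aperf_prev;

        // Effective frequency is roughly the base frequency scaled by how
        // many cycles the core actually ran (APERF) versus reference
        // cycles (MPERF) since the previous sample.
        if (mperf2 > mperf)
            *frequency = baseFrequency * (aperf2 - aperf) / (mperf2 - mperf);

        // Store this sample for the next query (lines 170-173 above).
        gCPU[cpu].arch.mperf_prev = mperf2;
        gCPU[cpu].arch.aperf_prev = aperf2;
        gCPU[cpu].arch.perf_timestamp = timestamp2;
        gCPU[cpu].arch.frequency = *frequency;
    }
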
apic.cpp 323 gCPU[cpu].arch.logical_apic_id = apic_logical_apic_id();
325 gCPU[cpu].arch.logical_apic_id >>= 24;
327 gCPU[cpu].arch.logical_apic_id);
arch_smp.cpp 191 apic_set_interrupt_command(gCPU[i].arch.logical_apic_id, mode);
arch_int.cpp 274 cpu_ent* cpu = &gCPU[smp_get_current_cpu()];
/haiku/src/system/kernel/arch/x86/32/
syscalls.cpp 44 if (!(gCPU[i].arch.feature[type] & feature))
90 && !(gCPU[0].arch.family == 6 && gCPU[0].arch.model < 3
91 && gCPU[0].arch.stepping < 3)) {
descriptors.cpp 154 struct tss *tss = &gCPU[cpu].arch.tss;
186 cpu_ent& cpu = gCPU[x86_double_fault_get_cpu()];
211 struct tss* tss = &gCPU[cpuNum].arch.double_fault_tss;
260 memset(&gCPU[cpu].arch.tss, 0, sizeof(struct tss));
261 gCPU[cpu].arch.tss.ss0 = (KERNEL_DATA_SEGMENT << 3) | DPL_KERNEL;
262 gCPU[cpu].arch.tss.io_map_base = sizeof(struct tss);
265 set_tss_descriptor(&gdt[TSS_SEGMENT], (addr_t)&gCPU[cpu].arch.tss,
272 (addr_t)&gCPU[cpu].arch.kernel_tls, sizeof(void*), DT_DATA_WRITEABLE,
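
The 32-bit descriptor code keeps each CPU's TSS (and a separate double-fault TSS) inside gCPU[cpu].arch. A commented sketch of the setup implied by lines 260-265, with the surrounding GDT bookkeeping elided:

    // Zero the TSS, point ring-0 stack switches at the kernel data
    // segment, and park io_map_base past the end of the structure so no
    // I/O ports are granted to user mode.
    memset(&gCPU[cpu].arch.tss, 0, sizeof(struct tss));
    gCPU[cpu].arch.tss.ss0 = (KERNEL_DATA_SEGMENT << 3) | DPL_KERNEL;
    gCPU[cpu].arch.tss.io_map_base = sizeof(struct tss);

    // Publish the TSS through this CPU's GDT slot.
    set_tss_descriptor(&gdt[TSS_SEGMENT], (addr_t)&gCPU[cpu].arch.tss,
        sizeof(struct tss));    // size argument assumed
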
/haiku/src/system/kernel/arch/riscv64/
arch_smp.cpp 51 hartMask |= (uint64)1 << gCPU[i].arch.hartId;
74 sbi_send_ipi((uint64)1 << gCPU[target_cpu].arch.hartId, 0);
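
On riscv64 a logical CPU index has to be translated into its hart ID before an IPI can be requested through SBI, which is what both hits above do. A small sketch of sending an IPI to a set of CPUs, with the loop bound and target mask assumed:

    uint64 hartMask = 0;
    for (int32 i = 0; i < cpuCount; i++) {          // cpuCount: assumed
        if ((targetMask & ((uint64)1 << i)) != 0)   // targetMask: assumed
            hartMask |= (uint64)1 << gCPU[i].arch.hartId;
    }

    // SBI addresses harts, not Haiku's logical CPU numbers.
    sbi_send_ipi(hartMask, 0);
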
arch_cpu.cpp 52 cpu_ent* cpu = &gCPU[curCpu];
/haiku/headers/private/kernel/
cpu.h 91 extern cpu_ent gCPU[];
108 extern inline cpu_ent *get_cpu_struct(void) { return &gCPU[smp_get_current_cpu()]; }
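
gCPU is the kernel-wide array of per-CPU state, and get_cpu_struct() (declared at cpu.h:108 above) is the usual way code reaches its own slot. A trivial usage sketch; the helper name and the field read are only illustrative:

    static bool
    current_cpu_is_disabled(void)
    {
        cpu_ent* cpu = get_cpu_struct();    // == &gCPU[smp_get_current_cpu()]
        return cpu->disabled;               // same flag the scheduler tests below
    }
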
kscheduler.h 109 if (gCPU[smp_get_current_cpu()].invoke_scheduler)
/haiku/src/system/kernel/scheduler/
scheduler.cpp 136 gCPU[targetCPU->ID()].invoke_scheduler = true;
324 gCPU[thisCPU].invoke_scheduler = false;
342 oldThreadData->SetStolenInterruptTime(gCPU[thisCPU].interrupt_time);
382 if (gCPU[thisCPU].disabled) {
407 ASSERT(!gCPU[thisCPU].disabled || nextThreadData->IsIdle());
495 thread->previous_cpu = &gCPU[cpuID];
574 gCPU[cpuID].disabled = !enabled;
631 if (gCPU[i].topology_id[CPU_TOPOLOGY_SMT] == 0)
637 if (gCPU[i].topology_id[CPU_TOPOLOGY_SMT] == 0
638 && gCPU[
[all...]
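
Together with the kscheduler.h check above, these scheduler hits show the invoke_scheduler handshake: interrupt-time code sets the flag on the target CPU, and the scheduler clears it once it is actually rescheduling. A minimal sketch of both sides, with illustrative function names:

    // Request side: mark a CPU so it reschedules on its next interrupt exit.
    static void
    request_reschedule(int32 targetCPU)
    {
        gCPU[targetCPU].invoke_scheduler = true;
    }

    // Service side: acknowledge the request before picking the next thread.
    static void
    acknowledge_reschedule(int32 thisCPU)
    {
        gCPU[thisCPU].invoke_scheduler = false;
    }
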
scheduler_cpu.cpp 111 cpu_ent* entry = &gCPU[fCPUNumber];
184 ASSERT(!gCPU[fCPUNumber].disabled);
204 ASSERT(!gCPU[fCPUNumber].disabled);
263 cpu_ent* cpuEntry = &gCPU[fCPUNumber];
300 cpu_ent* cpu = &gCPU[ID()];
323 if (gCPU[fCPUNumber].disabled) {
/haiku/src/system/kernel/arch/x86/64/
descriptors.cpp 355 cpu_ent* cpu = &gCPU[smp_get_current_cpu()];
391 memset(&gCPU[cpu].arch.tss, 0, sizeof(struct tss));
392 gCPU[cpu].arch.tss.io_map_base = sizeof(struct tss);
395 struct tss* tss = &gCPU[cpu].arch.tss;
402 TSSDescriptor(uintptr_t(&gCPU[cpu].arch.tss), sizeof(struct tss)));
/haiku/src/add-ons/kernel/power/cpufreq/amd_pstates/
amd_pstates.cpp 93 if (!is_cpu_model_supported(&gCPU[i]))
/haiku/src/add-ons/kernel/power/cpufreq/intel_pstates/
intel_pstates.cpp 242 if (!is_cpu_model_supported(&gCPU[i]))
/haiku/src/system/kernel/arch/arm64/
arch_int.cpp 286 cpu_ent* cpu = &gCPU[smp_get_current_cpu()];
/haiku/src/system/kernel/debug/
debug.cpp 846 if (thread->cpu != gCPU + cpu) {
848 thread->cpu, gCPU + cpu);
1912 cpu_ent* cpu = gCPU + sDebuggerOnCPU;
1950 if (debug_call_with_fault_handler(gCPU[sDebuggerOnCPU].fault_jump_buffer,
2010 gCPU[sDebuggerOnCPU].fault_jump_buffer,
