Searched refs: per_cpu (Results 1 - 25 of 397), sorted by relevance
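The hits below are raw cross-reference matches for the per_cpu() accessor. As a quick orientation before the listing, here is a minimal sketch of the pattern every match uses: a statically declared per-CPU variable, a local-CPU update, and a remote read by CPU number. The names demo_hits, demo_note_hit and demo_read are invented for this sketch; only DEFINE_PER_CPU(), this_cpu_inc() and per_cpu() are the real kernel macros (from <linux/percpu.h>).

#include <linux/percpu.h>

/* Hypothetical per-CPU counter, named only for this sketch. */
static DEFINE_PER_CPU(unsigned long, demo_hits);

static void demo_note_hit(void)
{
	/* Update the current CPU's instance; preemption safety is
	 * handled inside the this_cpu_*() operation itself. */
	this_cpu_inc(demo_hits);
}

static unsigned long demo_read(int cpu)
{
	/* per_cpu(var, cpu) names the instance owned by a specific CPU;
	 * this is the form that every hit in the listing uses. */
	return per_cpu(demo_hits, cpu);
}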


/linux-master/tools/testing/selftests/ftrace/test.d/00basic/
ringbuffer_size.tc
19 if [ -d per_cpu/cpu0 ]; then
20 cd per_cpu/cpu0
/linux-master/arch/x86/xen/
smp.c
35 kfree(per_cpu(xen_resched_irq, cpu).name);
36 per_cpu(xen_resched_irq, cpu).name = NULL;
37 if (per_cpu(xen_resched_irq, cpu).irq >= 0) {
38 unbind_from_irqhandler(per_cpu(xen_resched_irq, cpu).irq, NULL);
39 per_cpu(xen_resched_irq, cpu).irq = -1;
41 kfree(per_cpu(xen_callfunc_irq, cpu).name);
42 per_cpu(xen_callfunc_irq, cpu).name = NULL;
43 if (per_cpu(xen_callfunc_irq, cpu).irq >= 0) {
44 unbind_from_irqhandler(per_cpu(xen_callfunc_irq, cpu).irq, NULL);
45 per_cpu(xen_callfunc_ir
[all...]
spinlock.c
25 int irq = per_cpu(lock_kicker_irq, cpu);
74 WARN(per_cpu(lock_kicker_irq, cpu) >= 0, "spinlock on CPU%d exists on IRQ%d!\n",
75 cpu, per_cpu(lock_kicker_irq, cpu));
78 per_cpu(irq_name, cpu) = name;
88 per_cpu(lock_kicker_irq, cpu) = irq;
101 kfree(per_cpu(irq_name, cpu));
102 per_cpu(irq_name, cpu) = NULL;
107 irq = per_cpu(lock_kicker_irq, cpu);
112 per_cpu(lock_kicker_irq, cpu) = -1;
smp_pv.c
101 kfree(per_cpu(xen_irq_work, cpu).name);
102 per_cpu(xen_irq_work, cpu).name = NULL;
103 if (per_cpu(xen_irq_work, cpu).irq >= 0) {
104 unbind_from_irqhandler(per_cpu(xen_irq_work, cpu).irq, NULL);
105 per_cpu(xen_irq_work, cpu).irq = -1;
108 kfree(per_cpu(xen_pmu_irq, cpu).name);
109 per_cpu(xen_pmu_irq, cpu).name = NULL;
110 if (per_cpu(xen_pmu_irq, cpu).irq >= 0) {
111 unbind_from_irqhandler(per_cpu(xen_pmu_irq, cpu).irq, NULL);
112 per_cpu(xen_pmu_ir
[all...]
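The xen/smp.c and xen/smp_pv.c hits above all follow one teardown idiom for per-CPU IRQ bookkeeping: free the per-CPU name, then unbind the handler and mark the slot free with -1. A condensed sketch of that shape, assuming a made-up demo_irq_info struct and demo_irq per-CPU variable (unbind_from_irqhandler() and kfree() are the real kernel APIs):

#include <linux/percpu.h>
#include <linux/slab.h>
#include <xen/events.h>

/* Invented stand-in for the { irq, name } pairs the hits operate on. */
struct demo_irq_info {
	int irq;
	char *name;
};
static DEFINE_PER_CPU(struct demo_irq_info, demo_irq) = { .irq = -1 };

static void demo_free_irq(unsigned int cpu)
{
	/* Free the per-CPU name first, then unbind and mark the slot
	 * free; -1 is the "nothing bound" sentinel used in these hits. */
	kfree(per_cpu(demo_irq, cpu).name);
	per_cpu(demo_irq, cpu).name = NULL;
	if (per_cpu(demo_irq, cpu).irq >= 0) {
		unbind_from_irqhandler(per_cpu(demo_irq, cpu).irq, NULL);
		per_cpu(demo_irq, cpu).irq = -1;
	}
}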
/linux-master/kernel/sched/
cpufreq.c
36 if (WARN_ON(per_cpu(cpufreq_update_util_data, cpu)))
40 rcu_assign_pointer(per_cpu(cpufreq_update_util_data, cpu), data);
56 rcu_assign_pointer(per_cpu(cpufreq_update_util_data, cpu), NULL);
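The kernel/sched/cpufreq.c hits publish and retract a per-CPU callback pointer under RCU. A minimal sketch of that publish/clear pattern, with hypothetical demo_hook and demo_data names; rcu_assign_pointer() and WARN_ON() are the real interfaces shown in the hits:

#include <linux/bug.h>
#include <linux/percpu.h>
#include <linux/rcupdate.h>

/* Invented payload type for this sketch. */
struct demo_data {
	void (*func)(struct demo_data *data);
};

/* Hypothetical per-CPU hook pointer, read under RCU by hot paths. */
static DEFINE_PER_CPU(struct demo_data __rcu *, demo_hook);

static void demo_set_hook(int cpu, struct demo_data *data)
{
	/* Refuse to silently overwrite an existing hook, as the WARN_ON
	 * in the hit at line 36 above does. */
	if (WARN_ON(per_cpu(demo_hook, cpu)))
		return;
	rcu_assign_pointer(per_cpu(demo_hook, cpu), data);
}

static void demo_clear_hook(int cpu)
{
	rcu_assign_pointer(per_cpu(demo_hook, cpu), NULL);
	/* Callers still need synchronize_rcu() before freeing 'data'. */
}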
/linux-master/arch/arm/mach-alpine/
alpine_cpu_resume.h
22 struct al_cpu_resume_regs_per_cpu per_cpu[]; member in struct:al_cpu_resume_regs
/linux-master/arch/mips/kernel/
mips-cpc.c
77 spin_lock_init(&per_cpu(cpc_core_lock, cpu));
100 spin_lock_irqsave(&per_cpu(cpc_core_lock, curr_core),
101 per_cpu(cpc_core_lock_flags, curr_core));
120 spin_unlock_irqrestore(&per_cpu(cpc_core_lock, curr_core),
121 per_cpu(cpc_core_lock_flags, curr_core));
/linux-master/arch/x86/include/asm/
stackprotector.h
69 per_cpu(fixed_percpu_data.stack_canary, cpu) = idle->stack_canary;
71 per_cpu(__stack_chk_guard, cpu) = idle->stack_canary;
/linux-master/arch/x86/kernel/apic/
apic_common.c
13 return per_cpu(x86_cpu_to_apicid, cpu);
24 return (int)per_cpu(x86_cpu_to_apicid, mps_cpu);
/linux-master/arch/sparc/include/asm/
cpudata_32.h
29 #define cpu_data(__cpu) per_cpu(__cpu_data, (__cpu))
cpudata_64.h
35 #define cpu_data(__cpu) per_cpu(__cpu_data, (__cpu))
/linux-master/arch/parisc/kernel/
topology.c
40 per_cpu(cpu_devices, cpuid).hotpluggable = 1;
42 if (register_cpu(&per_cpu(cpu_devices, cpuid), cpuid))
49 p = &per_cpu(cpu_data, cpuid);
51 const struct cpuinfo_parisc *cpuinfo = &per_cpu(cpu_data, cpu);
/linux-master/arch/riscv/kernel/
irq.c
52 per_cpu(irq_shadow_call_stack_ptr, cpu) =
66 per_cpu(irq_stack_ptr, cpu) = p;
78 per_cpu(irq_stack_ptr, cpu) = per_cpu(irq_stack, cpu);
/linux-master/arch/arm/mm/
context.c
67 asid = per_cpu(active_asids, cpu).counter;
69 asid = per_cpu(reserved_asids, cpu);
144 asid = atomic64_xchg(&per_cpu(active_asids, i), 0);
153 asid = per_cpu(reserved_asids, i);
155 per_cpu(reserved_asids, i) = asid;
180 if (per_cpu(reserved_asids, cpu) == asid) {
182 per_cpu(reserved_asids, cpu) = newasid;
254 && atomic64_xchg(&per_cpu(active_asids, cpu), asid))
270 atomic64_set(&per_cpu(active_asids, cpu), asid);
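The arm/mm/context.c hits combine a per-CPU atomic64_t (active_asids) that remote CPUs may take over with xchg and a plain per-CPU value (reserved_asids) updated under the caller's lock. A schematic sketch of that combination with generic names (demo_active, demo_reserved and demo_reserve_all are invented; the atomic64_*() and per_cpu() calls are real):

#include <linux/atomic.h>
#include <linux/cpumask.h>
#include <linux/percpu.h>
#include <linux/types.h>

/* Invented names: an atomic per-CPU "currently active" slot that other
 * CPUs may take over with xchg, plus a plain per-CPU saved value that
 * is only touched under the caller's lock. */
static DEFINE_PER_CPU(atomic64_t, demo_active);
static DEFINE_PER_CPU(u64, demo_reserved);

static void demo_reserve_all(void)
{
	u64 val;
	int cpu;

	for_each_possible_cpu(cpu) {
		/* Atomically take whatever the CPU had active... */
		val = atomic64_xchg(&per_cpu(demo_active, cpu), 0);
		/* ...falling back to its previously reserved value. */
		if (!val)
			val = per_cpu(demo_reserved, cpu);
		per_cpu(demo_reserved, cpu) = val;
	}
}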
/linux-master/arch/powerpc/kernel/
irq.c
101 seq_printf(p, "%10u ", per_cpu(irq_stat, j).timer_irqs_event);
106 seq_printf(p, "%10u ", per_cpu(irq_stat, j).broadcast_irqs_event);
111 seq_printf(p, "%10u ", per_cpu(irq_stat, j).timer_irqs_others);
116 seq_printf(p, "%10u ", per_cpu(irq_stat, j).spurious_irqs);
121 seq_printf(p, "%10u ", per_cpu(irq_stat, j).pmu_irqs);
126 seq_printf(p, "%10u ", per_cpu(irq_stat, j).mce_exceptions);
140 seq_printf(p, "%10u ", per_cpu(irq_stat, j).sreset_irqs);
146 seq_printf(p, "%10u ", per_cpu(irq_stat, j).soft_nmi_irqs);
154 seq_printf(p, "%10u ", per_cpu(irq_stat, j).doorbell_irqs);
167 u64 sum = per_cpu(irq_sta
[all...]
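The powerpc irq.c hits print one column per online CPU and (in the truncated hit at line 167) fold all CPUs into a single total. A hedged sketch of both idioms, with an invented demo_irq_stat structure; seq_printf(), for_each_online_cpu() and for_each_possible_cpu() are the real interfaces:

#include <linux/cpumask.h>
#include <linux/percpu.h>
#include <linux/seq_file.h>
#include <linux/types.h>

/* Invented per-CPU statistics block; field names are illustrative. */
struct demo_irq_stat {
	unsigned int timer_events;
	unsigned int spurious;
};
static DEFINE_PER_CPU(struct demo_irq_stat, demo_stat);

/* Print one column per online CPU, /proc/interrupts-style. */
static void demo_show_timer_events(struct seq_file *p)
{
	int j;

	seq_printf(p, "%-10s", "timer:");
	for_each_online_cpu(j)
		seq_printf(p, "%10u ", per_cpu(demo_stat, j).timer_events);
	seq_putc(p, '\n');
}

/* Fold every CPU's counter into one total for a summary line. */
static u64 demo_sum_timer_events(void)
{
	u64 sum = 0;
	int cpu;

	for_each_possible_cpu(cpu)
		sum += per_cpu(demo_stat, cpu).timer_events;
	return sum;
}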
/linux-master/drivers/cpufreq/
speedstep-centrino.c
261 per_cpu(centrino_model, policy->cpu) = model;
296 if ((per_cpu(centrino_cpu, cpu) == &cpu_ids[CPU_BANIAS]) ||
297 (per_cpu(centrino_cpu, cpu) == &cpu_ids[CPU_DOTHAN_A1]) ||
298 (per_cpu(centrino_cpu, cpu) == &cpu_ids[CPU_DOTHAN_B0])) {
303 if ((!per_cpu(centrino_model, cpu)) ||
304 (!per_cpu(centrino_model, cpu)->op_points))
309 per_cpu(centrino_model, cpu)->op_points[i].frequency
312 if (msr == per_cpu(centrino_model, cpu)->op_points[i].driver_data)
313 return per_cpu(centrino_model, cpu)->
317 return per_cpu(centrino_mode
[all...]
/linux-master/drivers/perf/
arm_pmu_acpi.c
220 per_cpu(pmu_irqs, cpu) = irq;
230 irq = per_cpu(pmu_irqs, cpu);
241 if (per_cpu(pmu_irqs, irq_cpu) == irq)
242 per_cpu(pmu_irqs, irq_cpu) = 0;
256 pmu = per_cpu(probed_pmus, cpu);
279 int other_irq = per_cpu(hw_events->irq, cpu);
298 int irq = per_cpu(pmu_irqs, cpu);
300 per_cpu(probed_pmus, cpu) = pmu;
305 per_cpu(hw_events->irq, cpu) = irq;
325 if (per_cpu(probed_pmu
[all...]
/linux-master/kernel/locking/
qspinlock_stat.h
52 sum += per_cpu(lockevents[id], cpu);
60 kicks += per_cpu(EVENT_COUNT(pv_kick_unlock), cpu);
64 kicks += per_cpu(EVENT_COUNT(pv_kick_wake), cpu);
112 per_cpu(pv_kick_time, cpu) = start;
/linux-master/arch/arm/include/asm/
system_misc.h
23 harden_branch_predictor_fn_t fn = per_cpu(harden_branch_predictor_fn,
/linux-master/arch/arm64/kernel/
irq.c
50 per_cpu(irq_shadow_call_stack_ptr, cpu) =
62 per_cpu(irq_stack_ptr, cpu) = p;
74 per_cpu(irq_stack_ptr, cpu) = per_cpu(irq_stack, cpu);
sdei.c
57 p = per_cpu(*ptr, cpu);
59 per_cpu(*ptr, cpu) = NULL;
84 per_cpu(*ptr, cpu) = p;
116 s = per_cpu(*ptr, cpu);
118 per_cpu(*ptr, cpu) = NULL;
140 per_cpu(*ptr, cpu) = s;
/linux-master/include/linux/
arch_topology.h
25 return per_cpu(cpu_scale, cpu);
34 return per_cpu(capacity_freq_ref, cpu);
41 return per_cpu(arch_freq_scale, cpu);
67 return per_cpu(thermal_pressure, cpu);
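The include/linux/arch_topology.h hits are thin static-inline accessors that simply return a per-CPU value for a given CPU. A header-style sketch of that pattern, with an invented demo_cpu_scale variable (DECLARE_PER_CPU() in the header pairs with a DEFINE_PER_CPU() in exactly one .c file):

/* demo_topology.h - illustrative accessor; all names are invented. */
#include <linux/percpu.h>

DECLARE_PER_CPU(unsigned long, demo_cpu_scale);

/* Callers read another CPU's value by number; the matching
 * DEFINE_PER_CPU() and any writers live in a single .c file. */
static inline unsigned long demo_get_cpu_scale(int cpu)
{
	return per_cpu(demo_cpu_scale, cpu);
}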
/linux-master/arch/x86/kernel/
irq_64.c
53 per_cpu(pcpu_hot.hardirq_stack_ptr, cpu) = va + IRQ_STACK_SIZE - 8;
66 per_cpu(pcpu_hot.hardirq_stack_ptr, cpu) = va + IRQ_STACK_SIZE - 8;
73 if (per_cpu(pcpu_hot.hardirq_stack_ptr, cpu))
/linux-master/kernel/
profile.c
175 per_cpu(cpu_profile_flip, cpu) = !per_cpu(cpu_profile_flip, cpu);
183 j = per_cpu(cpu_profile_flip, get_cpu());
187 struct profile_hit *hits = per_cpu(cpu_profile_hits, cpu)[j];
206 i = per_cpu(cpu_profile_flip, get_cpu());
210 struct profile_hit *hits = per_cpu(cpu_profile_hits, cpu)[i];
226 hits = per_cpu(cpu_profile_hits, cpu)[per_cpu(cpu_profile_flip, cpu)];
274 if (per_cpu(cpu_profile_hits, cpu)[i]) {
275 page = virt_to_page(per_cpu(cpu_profile_hit
[all...]
/linux-master/arch/powerpc/include/asm/
smp.h
116 return per_cpu(cpu_sibling_map, cpu);
121 return per_cpu(cpu_core_map, cpu);
126 return per_cpu(cpu_l2_cache_map, cpu);
131 return per_cpu(cpu_smallcore_map, cpu);
145 return per_cpu(cpu_smallcore_map, cpu);
147 return per_cpu(cpu_sibling_map, cpu);

Completed in 283 milliseconds
