Searched refs: this_cpu (Results 1 - 25 of 62), sorted by last modified time


/linux-master/kernel/sched/
fair.c
6010 int this_cpu = smp_processor_id(); local
6051 if (cpu_of(rq) != this_cpu) {
7023 wake_affine_idle(int this_cpu, int prev_cpu, int sync) argument
7026 * If this_cpu is idle, it implies the wakeup is from interrupt
7037 if (available_idle_cpu(this_cpu) && cpus_share_cache(this_cpu, prev_cpu))
7038 return available_idle_cpu(prev_cpu) ? prev_cpu : this_cpu;
7040 if (sync && cpu_rq(this_cpu)->nr_running == 1)
7041 return this_cpu;
7050 wake_affine_weight(struct sched_domain *sd, struct task_struct *p, int this_cpu, int prev_cpu, int sync) argument
7051 int this_cpu, in
7092 wake_affine(struct sched_domain *sd, struct task_struct *p, int this_cpu, int prev_cpu, int sync) argument
7119 find_idlest_group_cpu(struct sched_group *group, struct task_struct *p, int this_cpu) argument
10319 find_idlest_group(struct sched_domain *sd, struct task_struct *p, int this_cpu) argument
11259 load_balance(int this_cpu, struct rq *this_rq, struct sched_domain *sd, enum cpu_idle_type idle, int *continue_balancing) argument
12100 int this_cpu = this_rq->cpu; local
12244 int this_cpu = this_rq->cpu; local
12292 int this_cpu = this_rq->cpu; local
[all...]
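The wake_affine_idle() hits above (fair.c lines 7023-7041) capture the scheduler's cheap wake-affinity test: prefer an idle, cache-sharing CPU, and fall back to this_cpu for a synchronous wakeup when the waker is about to sleep. A minimal user-space sketch of that decision follows; available_idle_cpu(), cpus_share_cache() and the run-queue length are hypothetical stand-ins backed by small arrays so the program actually runs, and -1 plays the role of the kernel's "no preference" sentinel.

/*
 * Illustrative model of the wake_affine_idle() logic listed above.
 * The helpers and the topology data are invented for the example.
 */
#include <stdio.h>
#include <stdbool.h>

static bool cpu_idle[4]    = { true, false, true, false };
static int  llc_id[4]      = { 0, 0, 1, 1 };   /* CPUs 0-1 and 2-3 each share a cache */
static int  nr_running[4]  = { 0, 1, 0, 2 };

static bool available_idle_cpu(int cpu) { return cpu_idle[cpu]; }
static bool cpus_share_cache(int a, int b) { return llc_id[a] == llc_id[b]; }

static int wake_affine_idle(int this_cpu, int prev_cpu, int sync)
{
	/* A wakeup from an idle CPU usually comes from interrupt context:
	 * only pull the task if the two CPUs share a cache, and prefer
	 * prev_cpu when it is idle as well. */
	if (available_idle_cpu(this_cpu) && cpus_share_cache(this_cpu, prev_cpu))
		return available_idle_cpu(prev_cpu) ? prev_cpu : this_cpu;

	/* Synchronous wakeup and only the waker is running here: its CPU
	 * is about to become free, so run the wakee on this_cpu. */
	if (sync && nr_running[this_cpu] == 1)
		return this_cpu;

	return -1;	/* no preference */
}

int main(void)
{
	printf("wake on CPU %d\n", wake_affine_idle(0, 1, 0));	/* 0: idle and cache-affine */
	printf("wake on CPU %d\n", wake_affine_idle(3, 0, 0));	/* -1: no preference */
	return 0;
}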
core.c
3956 bool cpus_equal_capacity(int this_cpu, int that_cpu) argument
3961 if (this_cpu == that_cpu)
3964 return arch_scale_cpu_capacity(this_cpu) == arch_scale_cpu_capacity(that_cpu);
3967 bool cpus_share_cache(int this_cpu, int that_cpu) argument
3969 if (this_cpu == that_cpu)
3972 return per_cpu(sd_llc_id, this_cpu) == per_cpu(sd_llc_id, that_cpu);
3979 bool cpus_share_resources(int this_cpu, int that_cpu) argument
3981 if (this_cpu == that_cpu)
3984 return per_cpu(sd_share_id, this_cpu) == per_cpu(sd_share_id, that_cpu);
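The core.c hits are the three CPU-relationship predicates that fair.c leans on: each short-circuits to true when this_cpu == that_cpu and otherwise compares a per-CPU topology value (sd_llc_id, sd_share_id, or arch_scale_cpu_capacity()). A rough model of cpus_share_cache() is sketched below; a plain array stands in for the kernel's per_cpu() accessor and the LLC ids are made up for the example.

/*
 * Rough user-space model of cpus_share_cache() from the core.c hits
 * above. sd_llc_id[] stands in for the per-CPU variable of the same
 * name; values are invented for illustration.
 */
#include <stdio.h>
#include <stdbool.h>

static int sd_llc_id[8] = { 0, 0, 0, 0, 4, 4, 4, 4 };	/* two 4-CPU LLC domains */

static bool cpus_share_cache(int this_cpu, int that_cpu)
{
	if (this_cpu == that_cpu)
		return true;

	return sd_llc_id[this_cpu] == sd_llc_id[that_cpu];
}

int main(void)
{
	printf("0 and 3 share a cache: %d\n", cpus_share_cache(0, 3));	/* 1 */
	printf("0 and 5 share a cache: %d\n", cpus_share_cache(0, 5));	/* 0 */
	return 0;
}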
/linux-master/arch/x86/kernel/cpu/
common.c
196 static const struct cpu_dev *this_cpu = &default_cpu; variable in typeref:struct:cpu_dev
689 if (!this_cpu)
692 info = this_cpu->legacy_models;
833 if (this_cpu->legacy_cache_size)
834 l2size = this_cpu->legacy_cache_size(c, l2size);
857 if (this_cpu->c_detect_tlb)
858 this_cpu->c_detect_tlb(c);
882 this_cpu = cpu_devs[i];
883 c->x86_vendor = this_cpu->c_x86_vendor;
892 this_cpu
[all...]
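Note that in common.c this_cpu is not a CPU number at all: as the line-196 hit shows, it is a static pointer to the detected vendor's struct cpu_dev, and the later hits are guarded indirect calls through its callbacks (legacy_cache_size, c_detect_tlb). A toy sketch of that dispatch pattern is shown below; the vendor tables and callback bodies are invented, only the shape of the pattern follows the hits above.

/*
 * Toy model of the vendor-dispatch use of this_cpu in common.c: a static
 * ops-table pointer selected at identification time and used for
 * optional callbacks afterwards.
 */
#include <stdio.h>

struct cpu_dev {
	const char *c_vendor;
	unsigned int (*legacy_cache_size)(unsigned int l2size);
	void (*c_detect_tlb)(void);
};

static unsigned int demo_cache_size(unsigned int l2size) { return l2size ? l2size : 512; }
static void demo_detect_tlb(void) { puts("detecting TLB sizes"); }

static const struct cpu_dev default_cpu = { .c_vendor = "unknown" };
static const struct cpu_dev demo_cpu = {
	.c_vendor = "demo",
	.legacy_cache_size = demo_cache_size,
	.c_detect_tlb = demo_detect_tlb,
};

static const struct cpu_dev *this_cpu = &default_cpu;

int main(void)
{
	this_cpu = &demo_cpu;	/* in the kernel: this_cpu = cpu_devs[i]; */

	unsigned int l2size = 0;
	if (this_cpu->legacy_cache_size)
		l2size = this_cpu->legacy_cache_size(l2size);
	if (this_cpu->c_detect_tlb)
		this_cpu->c_detect_tlb();

	printf("%s: l2size=%uK\n", this_cpu->c_vendor, l2size);
	return 0;
}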
mshyperv.c
267 unsigned int old_cpu, this_cpu; local
273 this_cpu = raw_smp_processor_id();
274 if (!atomic_try_cmpxchg(&nmi_cpu, &old_cpu, this_cpu))
/linux-master/arch/x86/hyperv/
hv_apic.c
160 int cur_cpu, vcpu, this_cpu = smp_processor_id(); local
175 (exclude_self && weight == 1 && cpumask_test_cpu(this_cpu, mask)))
204 if (exclude_self && cur_cpu == this_cpu)
/linux-master/arch/x86/kernel/
kvm.c
565 unsigned int this_cpu = smp_processor_id(); local
570 cpumask_clear_cpu(this_cpu, new_mask);
smp.c
151 unsigned int old_cpu, this_cpu; local
159 this_cpu = smp_processor_id();
160 if (!atomic_try_cmpxchg(&stopping_cpu, &old_cpu, this_cpu))
191 cpumask_clear_cpu(this_cpu, &cpus_stop_mask);
tsc.c
214 unsigned int cpu, this_cpu = smp_processor_id(); local
219 if (cpu != this_cpu) {
process.c
505 unsigned int this_cpu = smp_processor_id(); local
523 for_each_cpu(cpu, topology_sibling_cpumask(this_cpu)) {
524 if (cpu == this_cpu)
/linux-master/arch/sparc/kernel/
nmi.c
75 int this_cpu = smp_processor_id(); local
82 panic("Watchdog detected hard LOCKUP on cpu %d", this_cpu);
84 WARN(1, "Watchdog detected hard LOCKUP on cpu %d", this_cpu);
irq_64.c
1003 void notrace sun4v_register_mondo_queues(int this_cpu) argument
1005 struct trap_per_cpu *tb = &trap_block[this_cpu];
chmc.c
593 unsigned long ret, this_cpu; local
597 this_cpu = real_hard_smp_processor_id();
599 if (p->portid == this_cpu) {
/linux-master/tools/perf/
builtin-sched.c
1551 struct perf_cpu this_cpu = { local
1559 BUG_ON(this_cpu.cpu >= MAX_CPUS || this_cpu.cpu < 0);
1561 if (this_cpu.cpu > sched->max_cpu.cpu)
1562 sched->max_cpu = this_cpu;
1566 if (!__test_and_set_bit(this_cpu.cpu, sched->map.comp_cpus_mask)) {
1567 sched->map.comp_cpus[cpus_nr++] = this_cpu;
1573 timestamp0 = sched->cpu_last_switched[this_cpu.cpu];
1574 sched->cpu_last_switched[this_cpu.cpu] = timestamp;
1595 sched->curr_thread[this_cpu
1692 int this_cpu = sample->cpu, err = 0; local
2937 struct perf_cpu this_cpu = { local
[all...]
/linux-master/kernel/
watchdog.c
153 unsigned int this_cpu = smp_processor_id(); local
182 if (cpu == this_cpu) {
panic.c
196 int old_cpu, this_cpu; local
199 this_cpu = raw_smp_processor_id();
202 if (atomic_try_cmpxchg(&panic_cpu, &old_cpu, this_cpu))
204 else if (old_cpu != this_cpu)
287 int old_cpu, this_cpu; local
321 * `old_cpu == this_cpu' means we came from nmi_panic() which sets
325 this_cpu = raw_smp_processor_id();
328 if (atomic_try_cmpxchg(&panic_cpu, &old_cpu, this_cpu)) {
330 } else if (old_cpu != this_cpu)
crash_core.c
129 int old_cpu, this_cpu; local
137 this_cpu = raw_smp_processor_id();
139 if (atomic_try_cmpxchg(&panic_cpu, &old_cpu, this_cpu)) {
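The mshyperv.c, x86 smp.c, panic.c and crash_core.c hits all use the same election idiom: each CPU tries an atomic compare-and-exchange of a shared owner variable from an invalid sentinel to its own id, the first CPU to succeed proceeds, and a re-entry on the already-owning CPU (the `old_cpu == this_cpu' case from nmi_panic()) is distinguished from a lost race by looking at the observed old value. A C11-atomics sketch of the idiom follows; CPU_INVALID and first_cpu_wins() are illustrative names, not kernel API.

/*
 * Sketch of the atomic_try_cmpxchg() election pattern visible in the
 * panic.c / crash_core.c / smp.c hits above, rewritten with C11 atomics
 * for user space.
 */
#include <stdatomic.h>
#include <stdbool.h>
#include <stdio.h>

#define CPU_INVALID -1

static atomic_int owner_cpu = CPU_INVALID;

/* Returns true if this_cpu won the election, or already owns it. */
static bool first_cpu_wins(int this_cpu)
{
	int old_cpu = CPU_INVALID;

	if (atomic_compare_exchange_strong(&owner_cpu, &old_cpu, this_cpu))
		return true;		/* we claimed it first */

	/* On failure old_cpu holds the current owner: re-entry on the
	 * owning CPU is allowed, every other CPU loses the race. */
	return old_cpu == this_cpu;
}

int main(void)
{
	printf("CPU 2 wins: %d\n", first_cpu_wins(2));	/* 1 */
	printf("CPU 5 wins: %d\n", first_cpu_wins(5));	/* 0 */
	printf("CPU 2 again: %d\n", first_cpu_wins(2));	/* 1: same CPU re-entering */
	return 0;
}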
/linux-master/drivers/gpu/drm/i915/
i915_request.c
1893 unsigned int this_cpu; local
1895 if (time_after(local_clock_ns(&this_cpu), timeout))
1898 return this_cpu != cpu;
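The i915_request.c hit is the exit test of a short busy-wait: give up once a monotonic-clock deadline passes, or once the caller has been migrated off the CPU it started on and spinning no longer makes sense. Below is a rough POSIX analog; busywait_stop() is an assumed name for the surrounding helper, and sched_getcpu() is a glibc/Linux-specific stand-in for the kernel's cheap CPU lookup.

/*
 * Rough user-space analog of the busy-wait exit test in the
 * i915_request.c hit above: stop when the deadline passes or when we
 * have been migrated to another CPU.
 */
#define _GNU_SOURCE
#include <sched.h>
#include <stdbool.h>
#include <stdint.h>
#include <stdio.h>
#include <time.h>

static uint64_t local_clock_ns(int *this_cpu)
{
	struct timespec ts;

	clock_gettime(CLOCK_MONOTONIC, &ts);
	*this_cpu = sched_getcpu();
	return (uint64_t)ts.tv_sec * 1000000000ull + ts.tv_nsec;
}

static bool busywait_stop(uint64_t timeout_ns, int cpu)
{
	int this_cpu;

	if (local_clock_ns(&this_cpu) > timeout_ns)
		return true;		/* deadline passed */

	return this_cpu != cpu;		/* migrated: stop spinning */
}

int main(void)
{
	int cpu;
	uint64_t deadline = local_clock_ns(&cpu) + 10 * 1000;	/* spin ~10us */
	unsigned long spins = 0;

	while (!busywait_stop(deadline, cpu))
		spins++;

	printf("spun %lu times on CPU %d\n", spins, cpu);
	return 0;
}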
/linux-master/drivers/cpufreq/
intel_pstate.c
1638 unsigned int this_cpu = smp_processor_id(); local
1652 if (!cpumask_test_cpu(this_cpu, &hwp_intr_enable_mask))
1668 cpudata = READ_ONCE(all_cpu_data[this_cpu]);
/linux-master/arch/sparc/mm/
init_64.c
229 static inline void set_dcache_dirty(struct folio *folio, int this_cpu) argument
231 unsigned long mask = this_cpu;
299 int this_cpu = get_cpu(); local
304 if (cpu == this_cpu)
467 int this_cpu; local
479 this_cpu = get_cpu();
487 if (dirty_cpu == this_cpu)
491 set_dcache_dirty(folio, this_cpu);
/linux-master/arch/s390/kernel/
machine_kexec.c
82 int this_cpu, cpu; local
86 this_cpu = smp_find_processor_id(stap());
88 if (cpu == this_cpu)
/linux-master/arch/arm/kernel/
machine_kexec.c
105 int cpu, this_cpu = raw_smp_processor_id(); local
112 if (cpu == this_cpu)
/linux-master/include/linux/sched/
topology.h
179 bool cpus_equal_capacity(int this_cpu, int that_cpu);
180 bool cpus_share_cache(int this_cpu, int that_cpu);
181 bool cpus_share_resources(int this_cpu, int that_cpu);
230 static inline bool cpus_equal_capacity(int this_cpu, int that_cpu) argument
235 static inline bool cpus_share_cache(int this_cpu, int that_cpu) argument
240 static inline bool cpus_share_resources(int this_cpu, int that_cpu) argument
/linux-master/arch/x86/xen/
smp.c
236 unsigned int this_cpu = smp_processor_id(); local
243 if (this_cpu == cpu)
/linux-master/arch/x86/lib/
msr-smp.c
102 int this_cpu; local
109 this_cpu = get_cpu();
111 if (cpumask_test_cpu(this_cpu, mask))
/linux-master/arch/x86/kernel/apic/
x2apic_uv_x.c
723 unsigned int this_cpu = smp_processor_id(); local
727 if (cpu != this_cpu)
734 unsigned int this_cpu = smp_processor_id(); local
738 if (cpu != this_cpu)

Completed in 415 milliseconds
