Lines matching refs:coupled in drivers/cpuidle/coupled.c
151 int n = dev->coupled->online_count; in cpuidle_coupled_parallel_barrier()
206 static inline void cpuidle_coupled_set_ready(struct cpuidle_coupled *coupled) in cpuidle_coupled_set_ready() argument
208 atomic_add(MAX_WAITING_CPUS, &coupled->ready_waiting_counts); in cpuidle_coupled_set_ready()
226 inline int cpuidle_coupled_set_not_ready(struct cpuidle_coupled *coupled) in cpuidle_coupled_set_not_ready() argument
231 all = coupled->online_count | (coupled->online_count << WAITING_BITS); in cpuidle_coupled_set_not_ready()
232 ret = atomic_add_unless(&coupled->ready_waiting_counts, in cpuidle_coupled_set_not_ready()
244 static inline int cpuidle_coupled_no_cpus_ready(struct cpuidle_coupled *coupled) in cpuidle_coupled_no_cpus_ready() argument
246 int r = atomic_read(&coupled->ready_waiting_counts) >> WAITING_BITS; in cpuidle_coupled_no_cpus_ready()
256 static inline bool cpuidle_coupled_cpus_ready(struct cpuidle_coupled *coupled) in cpuidle_coupled_cpus_ready() argument
258 int r = atomic_read(&coupled->ready_waiting_counts) >> WAITING_BITS; in cpuidle_coupled_cpus_ready()
259 return r == coupled->online_count; in cpuidle_coupled_cpus_ready()
268 static inline bool cpuidle_coupled_cpus_waiting(struct cpuidle_coupled *coupled) in cpuidle_coupled_cpus_waiting() argument
270 int w = atomic_read(&coupled->ready_waiting_counts) & WAITING_MASK; in cpuidle_coupled_cpus_waiting()
271 return w == coupled->online_count; in cpuidle_coupled_cpus_waiting()
280 static inline int cpuidle_coupled_no_cpus_waiting(struct cpuidle_coupled *coupled) in cpuidle_coupled_no_cpus_waiting() argument
282 int w = atomic_read(&coupled->ready_waiting_counts) & WAITING_MASK; in cpuidle_coupled_no_cpus_waiting()
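
The helpers above all read coupled->ready_waiting_counts, a single atomic_t that packs two counters: the low WAITING_BITS bits count CPUs waiting to enter the coupled state, and the high bits count CPUs that are ready to enter it. Below is a minimal userspace model of that packing, assuming the WAITING_BITS / MAX_WAITING_CPUS / WAITING_MASK values from drivers/cpuidle/coupled.c; the model_* names are hypothetical stand-ins, not the kernel helpers.

#include <stdatomic.h>
#include <stdio.h>

#define WAITING_BITS     16
#define MAX_WAITING_CPUS (1 << WAITING_BITS)
#define WAITING_MASK     (MAX_WAITING_CPUS - 1)

/* Hypothetical userspace stand-in for coupled->ready_waiting_counts. */
static atomic_int ready_waiting_counts;

/* Low WAITING_BITS bits: CPUs that have requested the coupled state. */
static int model_waiting_count(void)
{
    return atomic_load(&ready_waiting_counts) & WAITING_MASK;
}

/* High bits: CPUs that are ready to actually enter it. */
static int model_ready_count(void)
{
    return atomic_load(&ready_waiting_counts) >> WAITING_BITS;
}

/* A CPU joins the waiting set: bump the low counter by 1. */
static void model_set_waiting(void)
{
    atomic_fetch_add(&ready_waiting_counts, 1);
}

/* A CPU becomes ready: bump the high counter by adding 1 << WAITING_BITS. */
static void model_set_ready(void)
{
    atomic_fetch_add(&ready_waiting_counts, MAX_WAITING_CPUS);
}

int main(void)
{
    model_set_waiting();
    model_set_waiting();
    model_set_ready();
    printf("waiting=%d ready=%d\n",
           model_waiting_count(), model_ready_count());
    return 0;
}

Keeping both counts in one word is what lets cpuidle_coupled_cpus_waiting() and cpuidle_coupled_cpus_ready() compare a single atomic read against coupled->online_count without taking any lock.
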
294 struct cpuidle_coupled *coupled) in cpuidle_coupled_get_state() argument
306 for_each_cpu(i, &coupled->coupled_cpus) in cpuidle_coupled_get_state()
307 if (cpu_online(i) && coupled->requested_state[i] < state) in cpuidle_coupled_get_state()
308 state = coupled->requested_state[i]; in cpuidle_coupled_get_state()
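
cpuidle_coupled_get_state() reduces the per-CPU requests to the shallowest state requested by any online CPU in the coupled set, since the cluster can only go as deep as its most restrictive member allows. A hedged standalone sketch of that reduction, with a made-up requested_state snapshot and online flags in place of the kernel's per-CPU bookkeeping:

#include <limits.h>
#include <stdio.h>

#define NR_COUPLED_CPUS 4

/* Hypothetical snapshot of each coupled CPU's requested idle state. */
static const int requested_state[NR_COUPLED_CPUS] = { 3, 1, 3, 2 };
static const int cpu_is_online[NR_COUPLED_CPUS]   = { 1, 1, 0, 1 };

/* Pick the shallowest state requested by any online coupled CPU. */
static int coupled_min_requested_state(void)
{
    int state = INT_MAX;

    for (int i = 0; i < NR_COUPLED_CPUS; i++)
        if (cpu_is_online[i] && requested_state[i] < state)
            state = requested_state[i];

    return state;
}

int main(void)
{
    printf("cluster enters state %d\n", coupled_min_requested_state());
    return 0;
}
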
348 struct cpuidle_coupled *coupled) in cpuidle_coupled_poke_others() argument
352 for_each_cpu(cpu, &coupled->coupled_cpus) in cpuidle_coupled_poke_others()
367 struct cpuidle_coupled *coupled, int next_state) in cpuidle_coupled_set_waiting() argument
369 coupled->requested_state[cpu] = next_state; in cpuidle_coupled_set_waiting()
375 return atomic_inc_return(&coupled->ready_waiting_counts) & WAITING_MASK; in cpuidle_coupled_set_waiting()
386 struct cpuidle_coupled *coupled) in cpuidle_coupled_set_not_waiting() argument
394 atomic_dec(&coupled->ready_waiting_counts); in cpuidle_coupled_set_not_waiting()
396 coupled->requested_state[cpu] = CPUIDLE_COUPLED_NOT_IDLE; in cpuidle_coupled_set_not_waiting()
408 static void cpuidle_coupled_set_done(int cpu, struct cpuidle_coupled *coupled) in cpuidle_coupled_set_done() argument
410 cpuidle_coupled_set_not_waiting(cpu, coupled); in cpuidle_coupled_set_done()
411 atomic_sub(MAX_WAITING_CPUS, &coupled->ready_waiting_counts); in cpuidle_coupled_set_done()
440 static bool cpuidle_coupled_any_pokes_pending(struct cpuidle_coupled *coupled) in cpuidle_coupled_any_pokes_pending() argument
442 return cpumask_first_and_and(cpu_online_mask, &coupled->coupled_cpus, in cpuidle_coupled_any_pokes_pending()
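
cpuidle_coupled_poke_others() kicks every other CPU in the coupled set so it re-checks the counters, and cpuidle_coupled_any_pokes_pending() asks whether any online coupled CPU still has an undelivered poke. The kernel does this with a per-CPU pending cpumask plus an async IPI (smp_call_function_single_async); the rough single-threaded model below only mimics the pending-bit bookkeeping, and every mask and name in it is hypothetical.

#include <stdbool.h>
#include <stdio.h>

#define NR_COUPLED_CPUS 4

/* Hypothetical masks standing in for cpu_online_mask, coupled_cpus and
 * the per-CPU "poke pending" bits. */
static unsigned int online_mask  = 0xb;   /* CPUs 0, 1, 3 online */
static unsigned int coupled_mask = 0xf;   /* CPUs 0-3 are coupled */
static unsigned int poke_pending;

/* Mark a poke pending for every coupled CPU except the caller. */
static void poke_others(int this_cpu)
{
    for (int cpu = 0; cpu < NR_COUPLED_CPUS; cpu++)
        if (cpu != this_cpu && (coupled_mask & (1u << cpu)))
            poke_pending |= 1u << cpu;
}

/* True if any online coupled CPU has not consumed its poke yet. */
static bool any_pokes_pending(void)
{
    return (poke_pending & online_mask & coupled_mask) != 0;
}

int main(void)
{
    poke_others(0);
    printf("pokes pending: %s\n", any_pokes_pending() ? "yes" : "no");
    return 0;
}
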
469 struct cpuidle_coupled *coupled = dev->coupled; in cpuidle_enter_state_coupled() local
472 if (!coupled) in cpuidle_enter_state_coupled()
475 while (coupled->prevent) { in cpuidle_enter_state_coupled()
492 w = cpuidle_coupled_set_waiting(dev->cpu, coupled, next_state); in cpuidle_enter_state_coupled()
500 if (w == coupled->online_count) { in cpuidle_enter_state_coupled()
502 cpuidle_coupled_poke_others(dev->cpu, coupled); in cpuidle_enter_state_coupled()
514 while (!cpuidle_coupled_cpus_waiting(coupled) || in cpuidle_enter_state_coupled()
520 cpuidle_coupled_set_not_waiting(dev->cpu, coupled); in cpuidle_enter_state_coupled()
524 if (coupled->prevent) { in cpuidle_enter_state_coupled()
525 cpuidle_coupled_set_not_waiting(dev->cpu, coupled); in cpuidle_enter_state_coupled()
536 cpuidle_coupled_set_not_waiting(dev->cpu, coupled); in cpuidle_enter_state_coupled()
555 cpuidle_coupled_set_ready(coupled); in cpuidle_enter_state_coupled()
556 while (!cpuidle_coupled_cpus_ready(coupled)) { in cpuidle_enter_state_coupled()
558 if (!cpuidle_coupled_cpus_waiting(coupled)) in cpuidle_enter_state_coupled()
559 if (!cpuidle_coupled_set_not_ready(coupled)) in cpuidle_enter_state_coupled()
580 if (cpuidle_coupled_any_pokes_pending(coupled)) { in cpuidle_enter_state_coupled()
581 cpuidle_coupled_set_done(dev->cpu, coupled); in cpuidle_enter_state_coupled()
583 cpuidle_coupled_parallel_barrier(dev, &coupled->abort_barrier); in cpuidle_enter_state_coupled()
588 next_state = cpuidle_coupled_get_state(dev, coupled); in cpuidle_enter_state_coupled()
592 cpuidle_coupled_set_done(dev->cpu, coupled); in cpuidle_enter_state_coupled()
616 while (!cpuidle_coupled_no_cpus_ready(coupled)) in cpuidle_enter_state_coupled()
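
The cpuidle_enter_state_coupled() hits above trace a two-stage rendezvous: each CPU counts itself as waiting (the last waiter poking the others), spins until the whole set is waiting, then counts itself as ready and spins until the whole set is ready, and only then does the cluster compute the shared state and enter it. The pthreads sketch below models just that counter choreography under the packed-counter layout shown earlier; it deliberately omits pokes, interrupt handling, the coupled->prevent gate, the set_not_ready/retry back-out and the set_done teardown, and the fake_cpu thread is purely illustrative.

#include <pthread.h>
#include <stdatomic.h>
#include <stdio.h>

#define NR_CPUS      4
#define WAITING_BITS 16
#define READY_ONE    (1 << WAITING_BITS)
#define WAITING_MASK (READY_ONE - 1)

/* Packed ready/waiting counter, laid out as in coupled.c. */
static atomic_int rwc;

static int waiting(void) { return atomic_load(&rwc) & WAITING_MASK; }
static int ready(void)   { return atomic_load(&rwc) >> WAITING_BITS; }

/* Hypothetical thread standing in for one CPU's idle entry. */
static void *fake_cpu(void *arg)
{
    long cpu = (long)arg;

    /* Stage 1: count this CPU as waiting and rendezvous with the rest.
     * The kernel polls pokes and services interrupts in this loop. */
    atomic_fetch_add(&rwc, 1);
    while (waiting() != NR_CPUS)
        ;

    /* Stage 2: count this CPU as ready; only when every coupled CPU
     * is ready may the shared cluster state actually be entered. */
    atomic_fetch_add(&rwc, READY_ONE);
    while (ready() != NR_CPUS)
        ;

    printf("cpu %ld enters the coupled state\n", cpu);
    return NULL;
}

int main(void)
{
    pthread_t t[NR_CPUS];

    for (long i = 0; i < NR_CPUS; i++)
        pthread_create(&t[i], NULL, fake_cpu, (void *)i);
    for (int i = 0; i < NR_CPUS; i++)
        pthread_join(t[i], NULL);
    return 0;
}

In the real function each CPU additionally tears the counts back down through cpuidle_coupled_set_done() and spins until cpuidle_coupled_no_cpus_ready() before returning, so the next idle cycle starts from zero.
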
622 static void cpuidle_coupled_update_online_cpus(struct cpuidle_coupled *coupled) in cpuidle_coupled_update_online_cpus() argument
624 coupled->online_count = cpumask_weight_and(cpu_online_mask, &coupled->coupled_cpus); in cpuidle_coupled_update_online_cpus()
640 struct cpuidle_coupled *coupled; in cpuidle_coupled_register_device() local
647 if (other_dev && other_dev->coupled) { in cpuidle_coupled_register_device()
648 coupled = other_dev->coupled; in cpuidle_coupled_register_device()
654 coupled = kzalloc(sizeof(struct cpuidle_coupled), GFP_KERNEL); in cpuidle_coupled_register_device()
655 if (!coupled) in cpuidle_coupled_register_device()
658 coupled->coupled_cpus = dev->coupled_cpus; in cpuidle_coupled_register_device()
661 dev->coupled = coupled; in cpuidle_coupled_register_device()
662 if (WARN_ON(!cpumask_equal(&dev->coupled_cpus, &coupled->coupled_cpus))) in cpuidle_coupled_register_device()
663 coupled->prevent++; in cpuidle_coupled_register_device()
665 cpuidle_coupled_update_online_cpus(coupled); in cpuidle_coupled_register_device()
667 coupled->refcnt++; in cpuidle_coupled_register_device()
685 struct cpuidle_coupled *coupled = dev->coupled; in cpuidle_coupled_unregister_device() local
690 if (--coupled->refcnt) in cpuidle_coupled_unregister_device()
691 kfree(coupled); in cpuidle_coupled_unregister_device()
692 dev->coupled = NULL; in cpuidle_coupled_unregister_device()
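
cpuidle_coupled_register_device() either attaches the new device to the struct cpuidle_coupled already created by another CPU with the same coupled_cpus mask or allocates a fresh one, and refcounts it; unregistration drops the refcount and frees the structure with the last device. From a driver's point of view the relevant input is dev->coupled_cpus, which cpuidle_register() fills from its second argument. The sketch below is only meant to compile inside a kernel tree with CONFIG_ARCH_NEEDS_CPU_IDLE_COUPLED selected; the driver name, latencies and enter callbacks are placeholders, not a real SoC driver.

#include <linux/cpuidle.h>
#include <linux/cpumask.h>
#include <linux/module.h>

/* Placeholder per-CPU state: a real driver would wait-for-interrupt here. */
static int example_enter_wfi(struct cpuidle_device *dev,
			     struct cpuidle_driver *drv, int index)
{
	return index;
}

/* Placeholder coupled state: SoC-specific cluster power-down would go here,
 * and it only runs once every coupled CPU has reached the ready stage. */
static int example_enter_cluster_off(struct cpuidle_device *dev,
				     struct cpuidle_driver *drv, int index)
{
	return index;
}

static struct cpuidle_driver example_idle_driver = {
	.name = "example_coupled_idle",
	.owner = THIS_MODULE,
	.states = {
		{
			.enter            = example_enter_wfi,
			.exit_latency     = 1,
			.target_residency = 1,
			.name             = "wfi",
			.desc             = "per-cpu idle",
		},
		{
			.enter            = example_enter_cluster_off,
			.exit_latency     = 1000,
			.target_residency = 2000,
			/* Routes this state through cpuidle_enter_state_coupled(). */
			.flags            = CPUIDLE_FLAG_COUPLED,
			.name             = "cluster-off",
			.desc             = "coupled cluster power down",
		},
	},
	.state_count = 2,
};

static int __init example_idle_init(void)
{
	/*
	 * The cpumask passed here is copied into each device's coupled_cpus,
	 * which is what cpuidle_coupled_register_device() uses to find or
	 * allocate the shared struct cpuidle_coupled.
	 */
	return cpuidle_register(&example_idle_driver, cpu_possible_mask);
}
module_init(example_idle_init);

Marking the deep state with CPUIDLE_FLAG_COUPLED is what routes it through cpuidle_enter_state_coupled() rather than the ordinary per-CPU cpuidle_enter_state() path.
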
702 static void cpuidle_coupled_prevent_idle(struct cpuidle_coupled *coupled) in cpuidle_coupled_prevent_idle() argument
707 coupled->prevent++; in cpuidle_coupled_prevent_idle()
708 cpuidle_coupled_poke_others(cpu, coupled); in cpuidle_coupled_prevent_idle()
710 while (!cpuidle_coupled_no_cpus_waiting(coupled)) in cpuidle_coupled_prevent_idle()
721 static void cpuidle_coupled_allow_idle(struct cpuidle_coupled *coupled) in cpuidle_coupled_allow_idle() argument
730 coupled->prevent--; in cpuidle_coupled_allow_idle()
732 cpuidle_coupled_poke_others(cpu, coupled); in cpuidle_coupled_allow_idle()
743 if (dev && dev->coupled) { in coupled_cpu_online()
744 cpuidle_coupled_update_online_cpus(dev->coupled); in coupled_cpu_online()
745 cpuidle_coupled_allow_idle(dev->coupled); in coupled_cpu_online()
759 if (dev && dev->coupled) in coupled_cpu_up_prepare()
760 cpuidle_coupled_prevent_idle(dev->coupled); in coupled_cpu_up_prepare()
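
The hotplug callbacks bracket a CPU coming online: coupled_cpu_up_prepare() raises coupled->prevent (and the kernel pokes all waiters out of the coupled path), while coupled_cpu_online() recomputes online_count and lowers prevent again via cpuidle_coupled_allow_idle(). A toy single-threaded model of that bracket, with hypothetical online[] / prevent variables standing in for the kernel's cpumask arithmetic and poking:

#include <stdio.h>

#define NR_COUPLED_CPUS 4

/* Hypothetical model of the hotplug bracket around coupled idle. */
static int prevent;        /* nonzero: coupled transitions are disallowed */
static int online_count;
static int online[NR_COUPLED_CPUS] = { 1, 1, 1, 0 };

static void update_online_cpus(void)
{
    online_count = 0;
    for (int i = 0; i < NR_COUPLED_CPUS; i++)
        online_count += online[i];
}

static void cpu_up_prepare(void)
{
    prevent++;              /* cpuidle_coupled_prevent_idle() */
}

static void cpu_online_done(int cpu)
{
    online[cpu] = 1;
    update_online_cpus();   /* cpuidle_coupled_update_online_cpus() */
    prevent--;              /* cpuidle_coupled_allow_idle() */
}

int main(void)
{
    update_online_cpus();
    printf("before: online=%d prevent=%d\n", online_count, prevent);

    cpu_up_prepare();
    printf("during hotplug: online=%d prevent=%d (coupled idle blocked)\n",
           online_count, prevent);

    cpu_online_done(3);
    printf("after: online=%d prevent=%d\n", online_count, prevent);
    return 0;
}
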