Lines matching full:vcpu

58 STATS_DESC_COUNTER(VCPU, sum_exits),
59 STATS_DESC_COUNTER(VCPU, mmio_exits),
60 STATS_DESC_COUNTER(VCPU, signal_exits),
61 STATS_DESC_COUNTER(VCPU, light_exits),
62 STATS_DESC_COUNTER(VCPU, itlb_real_miss_exits),
63 STATS_DESC_COUNTER(VCPU, itlb_virt_miss_exits),
64 STATS_DESC_COUNTER(VCPU, dtlb_real_miss_exits),
65 STATS_DESC_COUNTER(VCPU, dtlb_virt_miss_exits),
66 STATS_DESC_COUNTER(VCPU, syscall_exits),
67 STATS_DESC_COUNTER(VCPU, isi_exits),
68 STATS_DESC_COUNTER(VCPU, dsi_exits),
69 STATS_DESC_COUNTER(VCPU, emulated_inst_exits),
70 STATS_DESC_COUNTER(VCPU, dec_exits),
71 STATS_DESC_COUNTER(VCPU, ext_intr_exits),
72 STATS_DESC_COUNTER(VCPU, halt_successful_wait),
73 STATS_DESC_COUNTER(VCPU, dbell_exits),
74 STATS_DESC_COUNTER(VCPU, gdbell_exits),
75 STATS_DESC_COUNTER(VCPU, ld),
76 STATS_DESC_COUNTER(VCPU, st),
77 STATS_DESC_COUNTER(VCPU, pf_storage),
78 STATS_DESC_COUNTER(VCPU, pf_instruc),
79 STATS_DESC_COUNTER(VCPU, sp_storage),
80 STATS_DESC_COUNTER(VCPU, sp_instruc),
81 STATS_DESC_COUNTER(VCPU, queue_intr),
82 STATS_DESC_COUNTER(VCPU, ld_slow),
83 STATS_DESC_COUNTER(VCPU, st_slow),
84 STATS_DESC_COUNTER(VCPU, pthru_all),
85 STATS_DESC_COUNTER(VCPU, pthru_host),
86 STATS_DESC_COUNTER(VCPU, pthru_bad_aff)
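
The STATS_DESC_COUNTER(VCPU, ...) entries above are the descriptors that expose this file's per-vCPU counters through KVM's generic stats interface; the counters themselves are plain fields of the arch's struct kvm_vcpu_stat, as the vcpu->stat.queue_intr++ hit further down shows. A minimal sketch of how an exit path would bump two of them (the helper itself is made up for illustration):

/* Illustrative only: sum_exits and dec_exits are fields named by the
 * descriptors above; account_dec_exit() is a hypothetical helper. */
static void account_dec_exit(struct kvm_vcpu *vcpu)
{
	vcpu->stat.sum_exits++;		/* every guest exit is counted here */
	vcpu->stat.dec_exits++;		/* ...and this one was a decrementer exit */
}
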
98 static inline void kvmppc_update_int_pending(struct kvm_vcpu *vcpu, in kvmppc_update_int_pending() argument
101 if (is_kvmppc_hv_enabled(vcpu->kvm)) in kvmppc_update_int_pending()
104 kvmppc_set_int_pending(vcpu, 1); in kvmppc_update_int_pending()
106 kvmppc_set_int_pending(vcpu, 0); in kvmppc_update_int_pending()
109 static inline bool kvmppc_critical_section(struct kvm_vcpu *vcpu) in kvmppc_critical_section() argument
115 if (is_kvmppc_hv_enabled(vcpu->kvm)) in kvmppc_critical_section()
118 crit_raw = kvmppc_get_critical(vcpu); in kvmppc_critical_section()
119 crit_r1 = kvmppc_get_gpr(vcpu, 1); in kvmppc_critical_section()
122 if (!(kvmppc_get_msr(vcpu) & MSR_SF)) { in kvmppc_critical_section()
130 crit = crit && !(kvmppc_get_msr(vcpu) & MSR_PR); in kvmppc_critical_section()
135 void kvmppc_inject_interrupt(struct kvm_vcpu *vcpu, int vec, u64 flags) in kvmppc_inject_interrupt() argument
137 vcpu->kvm->arch.kvm_ops->inject_interrupt(vcpu, vec, flags); in kvmppc_inject_interrupt()
167 void kvmppc_book3s_dequeue_irqprio(struct kvm_vcpu *vcpu, in kvmppc_book3s_dequeue_irqprio() argument
170 unsigned long old_pending = vcpu->arch.pending_exceptions; in kvmppc_book3s_dequeue_irqprio()
173 &vcpu->arch.pending_exceptions); in kvmppc_book3s_dequeue_irqprio()
175 kvmppc_update_int_pending(vcpu, vcpu->arch.pending_exceptions, in kvmppc_book3s_dequeue_irqprio()
179 void kvmppc_book3s_queue_irqprio(struct kvm_vcpu *vcpu, unsigned int vec) in kvmppc_book3s_queue_irqprio() argument
181 vcpu->stat.queue_intr++; in kvmppc_book3s_queue_irqprio()
184 &vcpu->arch.pending_exceptions); in kvmppc_book3s_queue_irqprio()
191 void kvmppc_core_queue_machine_check(struct kvm_vcpu *vcpu, ulong srr1_flags) in kvmppc_core_queue_machine_check() argument
194 kvmppc_inject_interrupt(vcpu, BOOK3S_INTERRUPT_MACHINE_CHECK, srr1_flags); in kvmppc_core_queue_machine_check()
198 void kvmppc_core_queue_syscall(struct kvm_vcpu *vcpu) in kvmppc_core_queue_syscall() argument
200 kvmppc_inject_interrupt(vcpu, BOOK3S_INTERRUPT_SYSCALL, 0); in kvmppc_core_queue_syscall()
204 void kvmppc_core_queue_program(struct kvm_vcpu *vcpu, ulong srr1_flags) in kvmppc_core_queue_program() argument
207 kvmppc_inject_interrupt(vcpu, BOOK3S_INTERRUPT_PROGRAM, srr1_flags); in kvmppc_core_queue_program()
211 void kvmppc_core_queue_fpunavail(struct kvm_vcpu *vcpu, ulong srr1_flags) in kvmppc_core_queue_fpunavail() argument
214 kvmppc_inject_interrupt(vcpu, BOOK3S_INTERRUPT_FP_UNAVAIL, srr1_flags); in kvmppc_core_queue_fpunavail()
217 void kvmppc_core_queue_vec_unavail(struct kvm_vcpu *vcpu, ulong srr1_flags) in kvmppc_core_queue_vec_unavail() argument
220 kvmppc_inject_interrupt(vcpu, BOOK3S_INTERRUPT_ALTIVEC, srr1_flags); in kvmppc_core_queue_vec_unavail()
223 void kvmppc_core_queue_vsx_unavail(struct kvm_vcpu *vcpu, ulong srr1_flags) in kvmppc_core_queue_vsx_unavail() argument
226 kvmppc_inject_interrupt(vcpu, BOOK3S_INTERRUPT_VSX, srr1_flags); in kvmppc_core_queue_vsx_unavail()
229 void kvmppc_core_queue_dec(struct kvm_vcpu *vcpu) in kvmppc_core_queue_dec() argument
231 kvmppc_book3s_queue_irqprio(vcpu, BOOK3S_INTERRUPT_DECREMENTER); in kvmppc_core_queue_dec()
235 int kvmppc_core_pending_dec(struct kvm_vcpu *vcpu) in kvmppc_core_pending_dec() argument
237 return test_bit(BOOK3S_IRQPRIO_DECREMENTER, &vcpu->arch.pending_exceptions); in kvmppc_core_pending_dec()
241 void kvmppc_core_dequeue_dec(struct kvm_vcpu *vcpu) in kvmppc_core_dequeue_dec() argument
243 kvmppc_book3s_dequeue_irqprio(vcpu, BOOK3S_INTERRUPT_DECREMENTER); in kvmppc_core_dequeue_dec()
247 void kvmppc_core_queue_external(struct kvm_vcpu *vcpu, in kvmppc_core_queue_external() argument
262 * KVM_INTERRUPT_SET on a pseries guest vcpu, because the in kvmppc_core_queue_external()
271 vcpu->arch.external_oneshot = 1; in kvmppc_core_queue_external()
273 kvmppc_book3s_queue_irqprio(vcpu, BOOK3S_INTERRUPT_EXTERNAL); in kvmppc_core_queue_external()
276 void kvmppc_core_dequeue_external(struct kvm_vcpu *vcpu) in kvmppc_core_dequeue_external() argument
278 kvmppc_book3s_dequeue_irqprio(vcpu, BOOK3S_INTERRUPT_EXTERNAL); in kvmppc_core_dequeue_external()
281 void kvmppc_core_queue_data_storage(struct kvm_vcpu *vcpu, ulong srr1_flags, in kvmppc_core_queue_data_storage() argument
284 kvmppc_set_dar(vcpu, dar); in kvmppc_core_queue_data_storage()
285 kvmppc_set_dsisr(vcpu, dsisr); in kvmppc_core_queue_data_storage()
286 kvmppc_inject_interrupt(vcpu, BOOK3S_INTERRUPT_DATA_STORAGE, srr1_flags); in kvmppc_core_queue_data_storage()
290 void kvmppc_core_queue_inst_storage(struct kvm_vcpu *vcpu, ulong srr1_flags) in kvmppc_core_queue_inst_storage() argument
292 kvmppc_inject_interrupt(vcpu, BOOK3S_INTERRUPT_INST_STORAGE, srr1_flags); in kvmppc_core_queue_inst_storage()
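
The queue helpers above are what fault and emulation paths call to reflect an exception into the guest; for storage interrupts, the faulting address and reason go into DAR/DSISR before the interrupt is injected. A hedged usage sketch (the wrapper is hypothetical, and the parameter order is inferred from the kvmppc_set_dar()/kvmppc_set_dsisr() stores above):

/* Sketch only: reflect a guest data-storage fault; DSISR_PROTFAULT is a
 * real reason bit, but this wrapper and the zero srr1_flags are examples. */
static void reflect_protection_fault(struct kvm_vcpu *vcpu, ulong dar)
{
	kvmppc_core_queue_data_storage(vcpu, 0, dar, DSISR_PROTFAULT);
}
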
296 static int kvmppc_book3s_irqprio_deliver(struct kvm_vcpu *vcpu, in kvmppc_book3s_irqprio_deliver() argument
301 bool crit = kvmppc_critical_section(vcpu); in kvmppc_book3s_irqprio_deliver()
305 deliver = !kvmhv_is_nestedv2() && (kvmppc_get_msr(vcpu) & MSR_EE) && !crit; in kvmppc_book3s_irqprio_deliver()
309 deliver = !kvmhv_is_nestedv2() && (kvmppc_get_msr(vcpu) & MSR_EE) && !crit; in kvmppc_book3s_irqprio_deliver()
364 kvmppc_inject_interrupt(vcpu, vec, 0); in kvmppc_book3s_irqprio_deliver()
372 static bool clear_irqprio(struct kvm_vcpu *vcpu, unsigned int priority) in clear_irqprio() argument
384 if (vcpu->arch.external_oneshot) { in clear_irqprio()
385 vcpu->arch.external_oneshot = 0; in clear_irqprio()
394 int kvmppc_core_prepare_to_enter(struct kvm_vcpu *vcpu) in kvmppc_core_prepare_to_enter() argument
396 unsigned long *pending = &vcpu->arch.pending_exceptions; in kvmppc_core_prepare_to_enter()
397 unsigned long old_pending = vcpu->arch.pending_exceptions; in kvmppc_core_prepare_to_enter()
401 if (vcpu->arch.pending_exceptions) in kvmppc_core_prepare_to_enter()
402 printk(KERN_EMERG "KVM: Check pending: %lx\n", vcpu->arch.pending_exceptions); in kvmppc_core_prepare_to_enter()
406 if (kvmppc_book3s_irqprio_deliver(vcpu, priority) && in kvmppc_core_prepare_to_enter()
407 clear_irqprio(vcpu, priority)) { in kvmppc_core_prepare_to_enter()
408 clear_bit(priority, &vcpu->arch.pending_exceptions); in kvmppc_core_prepare_to_enter()
418 kvmppc_update_int_pending(vcpu, *pending, old_pending); in kvmppc_core_prepare_to_enter()
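
kvmppc_core_prepare_to_enter() walks the pending_exceptions bitmap in priority order, tries to deliver each exception, and clears the bit when clear_irqprio() agrees. The hits above only show the body of that loop; a sketch of the surrounding scan (the loop shape is assumed, only the three calls are confirmed by this file):

/* Assumed loop shape around the confirmed deliver/clear_irqprio/clear_bit calls. */
unsigned long *pending = &vcpu->arch.pending_exceptions;
unsigned int priority;

for_each_set_bit(priority, pending, BOOK3S_IRQPRIO_MAX) {
	if (kvmppc_book3s_irqprio_deliver(vcpu, priority) &&
	    clear_irqprio(vcpu, priority))
		clear_bit(priority, pending);
}
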
424 kvm_pfn_t kvmppc_gpa_to_pfn(struct kvm_vcpu *vcpu, gpa_t gpa, bool writing, in kvmppc_gpa_to_pfn() argument
427 ulong mp_pa = vcpu->arch.magic_page_pa & KVM_PAM; in kvmppc_gpa_to_pfn()
430 if (!(kvmppc_get_msr(vcpu) & MSR_SF)) in kvmppc_gpa_to_pfn()
436 ulong shared_page = ((ulong)vcpu->arch.shared) & PAGE_MASK; in kvmppc_gpa_to_pfn()
446 return gfn_to_pfn_prot(vcpu->kvm, gfn, writing, writable); in kvmppc_gpa_to_pfn()
450 int kvmppc_xlate(struct kvm_vcpu *vcpu, ulong eaddr, enum xlate_instdata xlid, in kvmppc_xlate() argument
455 int relocated = (kvmppc_get_msr(vcpu) & (data ? MSR_DR : MSR_IR)); in kvmppc_xlate()
459 r = vcpu->arch.mmu.xlate(vcpu, eaddr, pte, data, iswrite); in kvmppc_xlate()
469 if ((kvmppc_get_msr(vcpu) & (MSR_IR | MSR_DR)) == MSR_DR && in kvmppc_xlate()
471 if ((vcpu->arch.hflags & BOOK3S_HFLAG_SPLIT_HACK) && in kvmppc_xlate()
483 * as used in HEIR, vcpu->arch.last_inst and vcpu->arch.emul_inst.
484 * Like vcpu->arch.last_inst but unlike vcpu->arch.emul_inst, each
488 int kvmppc_load_last_inst(struct kvm_vcpu *vcpu, in kvmppc_load_last_inst() argument
491 ulong pc = kvmppc_get_pc(vcpu); in kvmppc_load_last_inst()
498 r = kvmppc_ld(vcpu, &pc, sizeof(u32), &iw, false); in kvmppc_load_last_inst()
505 if (kvmppc_get_msr(vcpu) & SRR1_PREFIXED) { in kvmppc_load_last_inst()
508 r = kvmppc_ld(vcpu, &pc, sizeof(u32), &suffix, false); in kvmppc_load_last_inst()
519 int kvmppc_subarch_vcpu_init(struct kvm_vcpu *vcpu) in kvmppc_subarch_vcpu_init() argument
524 void kvmppc_subarch_vcpu_uninit(struct kvm_vcpu *vcpu) in kvmppc_subarch_vcpu_uninit() argument
528 int kvm_arch_vcpu_ioctl_get_sregs(struct kvm_vcpu *vcpu, in kvm_arch_vcpu_ioctl_get_sregs() argument
533 vcpu_load(vcpu); in kvm_arch_vcpu_ioctl_get_sregs()
534 ret = vcpu->kvm->arch.kvm_ops->get_sregs(vcpu, sregs); in kvm_arch_vcpu_ioctl_get_sregs()
535 vcpu_put(vcpu); in kvm_arch_vcpu_ioctl_get_sregs()
540 int kvm_arch_vcpu_ioctl_set_sregs(struct kvm_vcpu *vcpu, in kvm_arch_vcpu_ioctl_set_sregs() argument
545 vcpu_load(vcpu); in kvm_arch_vcpu_ioctl_set_sregs()
546 ret = vcpu->kvm->arch.kvm_ops->set_sregs(vcpu, sregs); in kvm_arch_vcpu_ioctl_set_sregs()
547 vcpu_put(vcpu); in kvm_arch_vcpu_ioctl_set_sregs()
552 int kvm_arch_vcpu_ioctl_get_regs(struct kvm_vcpu *vcpu, struct kvm_regs *regs) in kvm_arch_vcpu_ioctl_get_regs() argument
556 regs->pc = kvmppc_get_pc(vcpu); in kvm_arch_vcpu_ioctl_get_regs()
557 regs->cr = kvmppc_get_cr(vcpu); in kvm_arch_vcpu_ioctl_get_regs()
558 regs->ctr = kvmppc_get_ctr(vcpu); in kvm_arch_vcpu_ioctl_get_regs()
559 regs->lr = kvmppc_get_lr(vcpu); in kvm_arch_vcpu_ioctl_get_regs()
560 regs->xer = kvmppc_get_xer(vcpu); in kvm_arch_vcpu_ioctl_get_regs()
561 regs->msr = kvmppc_get_msr(vcpu); in kvm_arch_vcpu_ioctl_get_regs()
562 regs->srr0 = kvmppc_get_srr0(vcpu); in kvm_arch_vcpu_ioctl_get_regs()
563 regs->srr1 = kvmppc_get_srr1(vcpu); in kvm_arch_vcpu_ioctl_get_regs()
564 regs->pid = kvmppc_get_pid(vcpu); in kvm_arch_vcpu_ioctl_get_regs()
565 regs->sprg0 = kvmppc_get_sprg0(vcpu); in kvm_arch_vcpu_ioctl_get_regs()
566 regs->sprg1 = kvmppc_get_sprg1(vcpu); in kvm_arch_vcpu_ioctl_get_regs()
567 regs->sprg2 = kvmppc_get_sprg2(vcpu); in kvm_arch_vcpu_ioctl_get_regs()
568 regs->sprg3 = kvmppc_get_sprg3(vcpu); in kvm_arch_vcpu_ioctl_get_regs()
569 regs->sprg4 = kvmppc_get_sprg4(vcpu); in kvm_arch_vcpu_ioctl_get_regs()
570 regs->sprg5 = kvmppc_get_sprg5(vcpu); in kvm_arch_vcpu_ioctl_get_regs()
571 regs->sprg6 = kvmppc_get_sprg6(vcpu); in kvm_arch_vcpu_ioctl_get_regs()
572 regs->sprg7 = kvmppc_get_sprg7(vcpu); in kvm_arch_vcpu_ioctl_get_regs()
575 regs->gpr[i] = kvmppc_get_gpr(vcpu, i); in kvm_arch_vcpu_ioctl_get_regs()
580 int kvm_arch_vcpu_ioctl_set_regs(struct kvm_vcpu *vcpu, struct kvm_regs *regs) in kvm_arch_vcpu_ioctl_set_regs() argument
584 kvmppc_set_pc(vcpu, regs->pc); in kvm_arch_vcpu_ioctl_set_regs()
585 kvmppc_set_cr(vcpu, regs->cr); in kvm_arch_vcpu_ioctl_set_regs()
586 kvmppc_set_ctr(vcpu, regs->ctr); in kvm_arch_vcpu_ioctl_set_regs()
587 kvmppc_set_lr(vcpu, regs->lr); in kvm_arch_vcpu_ioctl_set_regs()
588 kvmppc_set_xer(vcpu, regs->xer); in kvm_arch_vcpu_ioctl_set_regs()
589 kvmppc_set_msr(vcpu, regs->msr); in kvm_arch_vcpu_ioctl_set_regs()
590 kvmppc_set_srr0(vcpu, regs->srr0); in kvm_arch_vcpu_ioctl_set_regs()
591 kvmppc_set_srr1(vcpu, regs->srr1); in kvm_arch_vcpu_ioctl_set_regs()
592 kvmppc_set_sprg0(vcpu, regs->sprg0); in kvm_arch_vcpu_ioctl_set_regs()
593 kvmppc_set_sprg1(vcpu, regs->sprg1); in kvm_arch_vcpu_ioctl_set_regs()
594 kvmppc_set_sprg2(vcpu, regs->sprg2); in kvm_arch_vcpu_ioctl_set_regs()
595 kvmppc_set_sprg3(vcpu, regs->sprg3); in kvm_arch_vcpu_ioctl_set_regs()
596 kvmppc_set_sprg4(vcpu, regs->sprg4); in kvm_arch_vcpu_ioctl_set_regs()
597 kvmppc_set_sprg5(vcpu, regs->sprg5); in kvm_arch_vcpu_ioctl_set_regs()
598 kvmppc_set_sprg6(vcpu, regs->sprg6); in kvm_arch_vcpu_ioctl_set_regs()
599 kvmppc_set_sprg7(vcpu, regs->sprg7); in kvm_arch_vcpu_ioctl_set_regs()
602 kvmppc_set_gpr(vcpu, i, regs->gpr[i]); in kvm_arch_vcpu_ioctl_set_regs()
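
These two handlers are the kernel side of the KVM_GET_REGS and KVM_SET_REGS ioctls; every field they copy maps one-to-one onto struct kvm_regs in the powerpc uapi header. A userspace sketch, assuming vcpu_fd came from KVM_CREATE_VCPU and guest_entry is an address of your choosing:

#include <sys/ioctl.h>
#include <linux/kvm.h>

/* Userspace sketch: read, tweak, and write back the register file that
 * kvm_arch_vcpu_ioctl_get_regs()/set_regs() above marshal for us. */
static int set_guest_entry(int vcpu_fd, unsigned long guest_entry)
{
	struct kvm_regs regs;

	if (ioctl(vcpu_fd, KVM_GET_REGS, &regs) < 0)
		return -1;
	regs.pc = guest_entry;	/* start the vCPU at this address */
	return ioctl(vcpu_fd, KVM_SET_REGS, &regs);
}
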
607 int kvm_arch_vcpu_ioctl_get_fpu(struct kvm_vcpu *vcpu, struct kvm_fpu *fpu) in kvm_arch_vcpu_ioctl_get_fpu() argument
612 int kvm_arch_vcpu_ioctl_set_fpu(struct kvm_vcpu *vcpu, struct kvm_fpu *fpu) in kvm_arch_vcpu_ioctl_set_fpu() argument
617 int kvmppc_get_one_reg(struct kvm_vcpu *vcpu, u64 id, in kvmppc_get_one_reg() argument
623 r = vcpu->kvm->arch.kvm_ops->get_one_reg(vcpu, id, val); in kvmppc_get_one_reg()
628 *val = get_reg_val(id, kvmppc_get_dar(vcpu)); in kvmppc_get_one_reg()
631 *val = get_reg_val(id, kvmppc_get_dsisr(vcpu)); in kvmppc_get_one_reg()
635 *val = get_reg_val(id, kvmppc_get_fpr(vcpu, i)); in kvmppc_get_one_reg()
638 *val = get_reg_val(id, kvmppc_get_fpscr(vcpu)); in kvmppc_get_one_reg()
644 val->vsxval[0] = kvmppc_get_vsx_fpr(vcpu, i, 0); in kvmppc_get_one_reg()
645 val->vsxval[1] = kvmppc_get_vsx_fpr(vcpu, i, 1); in kvmppc_get_one_reg()
656 if (!vcpu->arch.icp && !vcpu->arch.xive_vcpu) { in kvmppc_get_one_reg()
661 *val = get_reg_val(id, kvmppc_xive_get_icp(vcpu)); in kvmppc_get_one_reg()
663 *val = get_reg_val(id, kvmppc_xics_get_icp(vcpu)); in kvmppc_get_one_reg()
668 if (!vcpu->arch.xive_vcpu) { in kvmppc_get_one_reg()
673 r = kvmppc_xive_native_get_vp(vcpu, val); in kvmppc_get_one_reg()
679 *val = get_reg_val(id, vcpu->arch.fscr); in kvmppc_get_one_reg()
682 *val = get_reg_val(id, kvmppc_get_tar(vcpu)); in kvmppc_get_one_reg()
685 *val = get_reg_val(id, kvmppc_get_ebbhr(vcpu)); in kvmppc_get_one_reg()
688 *val = get_reg_val(id, kvmppc_get_ebbrr(vcpu)); in kvmppc_get_one_reg()
691 *val = get_reg_val(id, kvmppc_get_bescr(vcpu)); in kvmppc_get_one_reg()
694 *val = get_reg_val(id, kvmppc_get_ic(vcpu)); in kvmppc_get_one_reg()
705 int kvmppc_set_one_reg(struct kvm_vcpu *vcpu, u64 id, in kvmppc_set_one_reg() argument
711 r = vcpu->kvm->arch.kvm_ops->set_one_reg(vcpu, id, val); in kvmppc_set_one_reg()
716 kvmppc_set_dar(vcpu, set_reg_val(id, *val)); in kvmppc_set_one_reg()
719 kvmppc_set_dsisr(vcpu, set_reg_val(id, *val)); in kvmppc_set_one_reg()
723 kvmppc_set_fpr(vcpu, i, set_reg_val(id, *val)); in kvmppc_set_one_reg()
726 vcpu->arch.fp.fpscr = set_reg_val(id, *val); in kvmppc_set_one_reg()
732 kvmppc_set_vsx_fpr(vcpu, i, 0, val->vsxval[0]); in kvmppc_set_one_reg()
733 kvmppc_set_vsx_fpr(vcpu, i, 1, val->vsxval[1]); in kvmppc_set_one_reg()
741 if (!vcpu->arch.icp && !vcpu->arch.xive_vcpu) { in kvmppc_set_one_reg()
746 r = kvmppc_xive_set_icp(vcpu, set_reg_val(id, *val)); in kvmppc_set_one_reg()
748 r = kvmppc_xics_set_icp(vcpu, set_reg_val(id, *val)); in kvmppc_set_one_reg()
753 if (!vcpu->arch.xive_vcpu) { in kvmppc_set_one_reg()
758 r = kvmppc_xive_native_set_vp(vcpu, val); in kvmppc_set_one_reg()
764 kvmppc_set_fpscr(vcpu, set_reg_val(id, *val)); in kvmppc_set_one_reg()
767 kvmppc_set_tar(vcpu, set_reg_val(id, *val)); in kvmppc_set_one_reg()
770 kvmppc_set_ebbhr(vcpu, set_reg_val(id, *val)); in kvmppc_set_one_reg()
773 kvmppc_set_ebbrr(vcpu, set_reg_val(id, *val)); in kvmppc_set_one_reg()
776 kvmppc_set_bescr(vcpu, set_reg_val(id, *val)); in kvmppc_set_one_reg()
779 kvmppc_set_ic(vcpu, set_reg_val(id, *val)); in kvmppc_set_one_reg()
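
kvmppc_get_one_reg() and kvmppc_set_one_reg() back the finer-grained ONE_REG interface: userspace names a single register by id and passes a pointer to its storage. A sketch using one of the ids handled above (DAR); vcpu_fd is again an assumption:

#include <sys/ioctl.h>
#include <linux/kvm.h>

/* Userspace sketch of the ONE_REG path dispatched above; KVM_REG_PPC_DAR
 * is the id whose case reads kvmppc_get_dar() in this file. */
static int read_guest_dar(int vcpu_fd, __u64 *dar)
{
	struct kvm_one_reg reg = {
		.id   = KVM_REG_PPC_DAR,
		.addr = (__u64)(unsigned long)dar,
	};

	return ioctl(vcpu_fd, KVM_GET_ONE_REG, &reg);
}
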
790 void kvmppc_core_vcpu_load(struct kvm_vcpu *vcpu, int cpu) in kvmppc_core_vcpu_load() argument
792 vcpu->kvm->arch.kvm_ops->vcpu_load(vcpu, cpu); in kvmppc_core_vcpu_load()
795 void kvmppc_core_vcpu_put(struct kvm_vcpu *vcpu) in kvmppc_core_vcpu_put() argument
797 vcpu->kvm->arch.kvm_ops->vcpu_put(vcpu); in kvmppc_core_vcpu_put()
800 void kvmppc_set_msr(struct kvm_vcpu *vcpu, u64 msr) in kvmppc_set_msr() argument
802 vcpu->kvm->arch.kvm_ops->set_msr(vcpu, msr); in kvmppc_set_msr()
806 int kvmppc_vcpu_run(struct kvm_vcpu *vcpu) in kvmppc_vcpu_run() argument
808 return vcpu->kvm->arch.kvm_ops->vcpu_run(vcpu); in kvmppc_vcpu_run()
811 int kvm_arch_vcpu_ioctl_translate(struct kvm_vcpu *vcpu, in kvm_arch_vcpu_ioctl_translate() argument
817 int kvm_arch_vcpu_ioctl_set_guest_debug(struct kvm_vcpu *vcpu, in kvm_arch_vcpu_ioctl_set_guest_debug() argument
820 vcpu_load(vcpu); in kvm_arch_vcpu_ioctl_set_guest_debug()
821 vcpu->guest_debug = dbg->control; in kvm_arch_vcpu_ioctl_set_guest_debug()
822 vcpu_put(vcpu); in kvm_arch_vcpu_ioctl_set_guest_debug()
826 void kvmppc_decrementer_func(struct kvm_vcpu *vcpu) in kvmppc_decrementer_func() argument
828 kvmppc_core_queue_dec(vcpu); in kvmppc_decrementer_func()
829 kvm_vcpu_kick(vcpu); in kvmppc_decrementer_func()
832 int kvmppc_core_vcpu_create(struct kvm_vcpu *vcpu) in kvmppc_core_vcpu_create() argument
834 return vcpu->kvm->arch.kvm_ops->vcpu_create(vcpu); in kvmppc_core_vcpu_create()
837 void kvmppc_core_vcpu_free(struct kvm_vcpu *vcpu) in kvmppc_core_vcpu_free() argument
839 vcpu->kvm->arch.kvm_ops->vcpu_free(vcpu); in kvmppc_core_vcpu_free()
842 int kvmppc_core_check_requests(struct kvm_vcpu *vcpu) in kvmppc_core_check_requests() argument
844 return vcpu->kvm->arch.kvm_ops->check_requests(vcpu); in kvmppc_core_check_requests()
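
Almost every wrapper above is a one-line bounce through vcpu->kvm->arch.kvm_ops, the per-VM backend vtable that lets this file stay agnostic about whether the HV or PR implementation is in use. A sketch of the shape those calls imply; only the member names that actually appear in the hits (vcpu_load, vcpu_put, set_msr, vcpu_run, vcpu_create, vcpu_free, check_requests, get/set_sregs, get/set_one_reg, inject_interrupt) are confirmed, and the rest of the struct is elided:

/* Sketch of a backend filling in struct kvmppc_ops; only the member
 * names used by the wrappers above are taken from this file. */
static void example_vcpu_load(struct kvm_vcpu *vcpu, int cpu)
{
	/* backend-specific per-CPU state load would go here */
}

static struct kvmppc_ops example_book3s_ops = {
	.vcpu_load = example_vcpu_load,
	/* .vcpu_put, .set_msr, .vcpu_run, .inject_interrupt, ... */
};
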
933 int kvmppc_h_logical_ci_load(struct kvm_vcpu *vcpu) in kvmppc_h_logical_ci_load() argument
935 unsigned long size = kvmppc_get_gpr(vcpu, 4); in kvmppc_h_logical_ci_load()
936 unsigned long addr = kvmppc_get_gpr(vcpu, 5); in kvmppc_h_logical_ci_load()
944 srcu_idx = srcu_read_lock(&vcpu->kvm->srcu); in kvmppc_h_logical_ci_load()
945 ret = kvm_io_bus_read(vcpu, KVM_MMIO_BUS, addr, size, &buf); in kvmppc_h_logical_ci_load()
946 srcu_read_unlock(&vcpu->kvm->srcu, srcu_idx); in kvmppc_h_logical_ci_load()
952 kvmppc_set_gpr(vcpu, 4, *(u8 *)&buf); in kvmppc_h_logical_ci_load()
956 kvmppc_set_gpr(vcpu, 4, be16_to_cpu(*(__be16 *)&buf)); in kvmppc_h_logical_ci_load()
960 kvmppc_set_gpr(vcpu, 4, be32_to_cpu(*(__be32 *)&buf)); in kvmppc_h_logical_ci_load()
964 kvmppc_set_gpr(vcpu, 4, be64_to_cpu(*(__be64 *)&buf)); in kvmppc_h_logical_ci_load()
975 int kvmppc_h_logical_ci_store(struct kvm_vcpu *vcpu) in kvmppc_h_logical_ci_store() argument
977 unsigned long size = kvmppc_get_gpr(vcpu, 4); in kvmppc_h_logical_ci_store()
978 unsigned long addr = kvmppc_get_gpr(vcpu, 5); in kvmppc_h_logical_ci_store()
979 unsigned long val = kvmppc_get_gpr(vcpu, 6); in kvmppc_h_logical_ci_store()
1005 srcu_idx = srcu_read_lock(&vcpu->kvm->srcu); in kvmppc_h_logical_ci_store()
1006 ret = kvm_io_bus_write(vcpu, KVM_MMIO_BUS, addr, size, &buf); in kvmppc_h_logical_ci_store()
1007 srcu_read_unlock(&vcpu->kvm->srcu, srcu_idx); in kvmppc_h_logical_ci_store()
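
kvmppc_h_logical_ci_load() and kvmppc_h_logical_ci_store() service the PAPR H_LOGICAL_CI_LOAD/H_LOGICAL_CI_STORE hypercalls: the GPR 4..6 reads above are the hcall arguments (access size, logical address, and, for the store, the value), and the load result is handed back in GPR 4 after the be*_to_cpu() conversions shown. A guest-side sketch, assuming the standard pseries plpar_hcall() wrapper and ignoring error handling:

/* Guest-side sketch (not from this file): issue H_LOGICAL_CI_LOAD for a
 * 4-byte cache-inhibited read; the loaded value comes back in retbuf[0]. */
static unsigned long ci_load32(unsigned long addr)
{
	unsigned long retbuf[PLPAR_HCALL_BUFSIZE];

	plpar_hcall(H_LOGICAL_CI_LOAD, retbuf, 4 /* size */, addr);
	return retbuf[0];
}
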