Lines Matching refs:VCPU_FPR
152 kvm_cvt_df(&VCPU_FPR(vcpu, rt), &vcpu->arch.qpr[rt]); in kvmppc_sync_qpr()
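The match above (152) syncs a guest FPR into the matching QPR slot when paired-single state is brought up to date. For reference, a hedged sketch of the macro and conversion helpers every match below relies on; the macro body, the TS_FPROFFSET indexing, and the header locations are assumptions based on mainline arch/powerpc, not taken from this listing, while the helper signatures follow directly from the calls shown here:

        /* Guest FPR i, kept as a 64-bit double image in the vcpu FP state
         * (assumed definition, roughly as in arch/powerpc/include/asm/kvm_book3s.h). */
        #define VCPU_FPR(vcpu, i)       (vcpu)->arch.fp.fpr[i][TS_FPROFFSET]

        /* Assembly helpers converting between a 32-bit single and the
         * 64-bit double image held in the FPR. */
        extern void kvm_cvt_fd(u32 *from, u64 *to);     /* single -> double */
        extern void kvm_cvt_df(u64 *from, u32 *to);     /* double -> single */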
201 kvm_cvt_fd((u32*)tmp, &VCPU_FPR(vcpu, rs)); in kvmppc_emulate_fpr_load()
205 VCPU_FPR(vcpu, rs) = *((u64*)tmp); in kvmppc_emulate_fpr_load()
227 kvm_cvt_df(&VCPU_FPR(vcpu, rs), (u32*)tmp); in kvmppc_emulate_fpr_store()
232 *((u32*)tmp) = VCPU_FPR(vcpu, rs); in kvmppc_emulate_fpr_store()
233 val = VCPU_FPR(vcpu, rs) & 0xffffffff; in kvmppc_emulate_fpr_store()
237 *((u64*)tmp) = VCPU_FPR(vcpu, rs); in kvmppc_emulate_fpr_store()
238 val = VCPU_FPR(vcpu, rs); in kvmppc_emulate_fpr_store()
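The kvmppc_emulate_fpr_load()/kvmppc_emulate_fpr_store() matches (201-238) all follow the same width split: a 4-byte FPU access goes through the single/double converters, an 8-byte access copies the raw 64-bit image. A minimal sketch of the load side, assuming the FPU_LS_* width enum and the tmp staging buffer used in the file:

        /* Sketch only: 'tmp' holds the bytes just read from guest memory. */
        switch (ls_type) {
        case FPU_LS_SINGLE:
                kvm_cvt_fd((u32*)tmp, &VCPU_FPR(vcpu, rs));     /* widen single to double image */
                break;
        case FPU_LS_DOUBLE:
                VCPU_FPR(vcpu, rs) = *((u64*)tmp);              /* copy raw double image */
                break;
        }

The store path (227-238) is the mirror image: kvm_cvt_df() narrows the FPR for a single-precision store, while a double-precision store copies VCPU_FPR(vcpu, rs) out unchanged.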
295 kvm_cvt_fd(&tmp[0], &VCPU_FPR(vcpu, rs)); in kvmppc_emulate_psq_load()
313 kvm_cvt_df(&VCPU_FPR(vcpu, rs), &tmp[0]); in kvmppc_emulate_psq_store()
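The paired-single quantized load/store matches (295, 313) touch both halves of the pair: ps0 lives in the FPR as a double image, ps1 lives in vcpu->arch.qpr[] as a raw single. A sketch of the load direction, with tmp[0]/tmp[1] assumed to hold the two dequantized singles:

        kvm_cvt_fd(&tmp[0], &VCPU_FPR(vcpu, rs));       /* ps0: widen into the FPR */
        vcpu->arch.qpr[rs] = tmp[1];                    /* ps1: kept as a raw single */

The store at 313 reverses the first step with kvm_cvt_df() and writes qpr[rs] back out for ps1.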
506 kvm_cvt_df(&VCPU_FPR(vcpu, reg_in1), &ps0_in1); in kvmppc_ps_three_in()
507 kvm_cvt_df(&VCPU_FPR(vcpu, reg_in2), &ps0_in2); in kvmppc_ps_three_in()
508 kvm_cvt_df(&VCPU_FPR(vcpu, reg_in3), &ps0_in3); in kvmppc_ps_three_in()
519 kvm_cvt_fd(&ps0_out, &VCPU_FPR(vcpu, reg_out)); in kvmppc_ps_three_in()
555 kvm_cvt_df(&VCPU_FPR(vcpu, reg_in1), &ps0_in1); in kvmppc_ps_two_in()
560 kvm_cvt_df(&VCPU_FPR(vcpu, reg_in2), &ps0_in2); in kvmppc_ps_two_in()
568 kvm_cvt_fd(&ps0_out, &VCPU_FPR(vcpu, reg_out)); in kvmppc_ps_two_in()
603 kvm_cvt_df(&VCPU_FPR(vcpu, reg_in), &ps0_in); in kvmppc_ps_one_in()
609 kvm_cvt_fd(&ps0_out, &VCPU_FPR(vcpu, reg_out)); in kvmppc_ps_one_in()
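The kvmppc_ps_one_in()/two_in()/three_in() matches (506-609) share one ps0 pipeline: narrow each input FPR to a single with kvm_cvt_df(), run the scalar soft-FPU callback, then widen the result back into the destination FPR with kvm_cvt_fd(). A hedged sketch of the two-operand case; the callback name 'func' and the fpscr argument are illustrative assumptions:

        u32 ps0_in1, ps0_in2, ps0_out;

        kvm_cvt_df(&VCPU_FPR(vcpu, reg_in1), &ps0_in1);         /* double -> single */
        kvm_cvt_df(&VCPU_FPR(vcpu, reg_in2), &ps0_in2);
        func(&vcpu->arch.fp.fpscr, &ps0_out, &ps0_in1, &ps0_in2); /* scalar op on ps0 (assumed signature) */
        kvm_cvt_fd(&ps0_out, &VCPU_FPR(vcpu, reg_out));         /* single -> double */

The qpr[] half (ps1) goes through the same callback separately, without any conversion, since it is already stored as a single.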
647 fpr_d = &VCPU_FPR(vcpu, ax_rd); in kvmppc_emulate_paired_single()
648 fpr_a = &VCPU_FPR(vcpu, ax_ra); in kvmppc_emulate_paired_single()
649 fpr_b = &VCPU_FPR(vcpu, ax_rb); in kvmppc_emulate_paired_single()
650 fpr_c = &VCPU_FPR(vcpu, ax_rc); in kvmppc_emulate_paired_single()
671 kvm_cvt_df(&VCPU_FPR(vcpu, i), &f); in kvmppc_emulate_paired_single()
673 i, f, VCPU_FPR(vcpu, i), i, vcpu->arch.qpr[i]); in kvmppc_emulate_paired_single()
759 VCPU_FPR(vcpu, ax_rd) = VCPU_FPR(vcpu, ax_rb); in kvmppc_emulate_paired_single()
760 VCPU_FPR(vcpu, ax_rd) ^= 0x8000000000000000ULL; in kvmppc_emulate_paired_single()
770 VCPU_FPR(vcpu, ax_rd) = VCPU_FPR(vcpu, ax_rb); in kvmppc_emulate_paired_single()
779 VCPU_FPR(vcpu, ax_rd) = VCPU_FPR(vcpu, ax_rb); in kvmppc_emulate_paired_single()
780 VCPU_FPR(vcpu, ax_rd) |= 0x8000000000000000ULL; in kvmppc_emulate_paired_single()
786 VCPU_FPR(vcpu, ax_rd) = VCPU_FPR(vcpu, ax_rb); in kvmppc_emulate_paired_single()
787 VCPU_FPR(vcpu, ax_rd) &= ~0x8000000000000000ULL; in kvmppc_emulate_paired_single()
793 VCPU_FPR(vcpu, ax_rd) = VCPU_FPR(vcpu, ax_ra); in kvmppc_emulate_paired_single()
795 kvm_cvt_df(&VCPU_FPR(vcpu, ax_rb), in kvmppc_emulate_paired_single()
800 VCPU_FPR(vcpu, ax_rd) = VCPU_FPR(vcpu, ax_ra); in kvmppc_emulate_paired_single()
807 &VCPU_FPR(vcpu, ax_rd)); in kvmppc_emulate_paired_single()
809 kvm_cvt_df(&VCPU_FPR(vcpu, ax_rb), in kvmppc_emulate_paired_single()
816 &VCPU_FPR(vcpu, ax_rd)); in kvmppc_emulate_paired_single()
851 VCPU_FPR(vcpu, ax_rd) = VCPU_FPR(vcpu, ax_rc); in kvmppc_emulate_paired_single()
1251 kvm_cvt_df(&VCPU_FPR(vcpu, i), &f); in kvmppc_emulate_paired_single()
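Most of the move/sign matches inside kvmppc_emulate_paired_single() (759-793, 851) edit the 64-bit FPR image in place instead of converting to single precision: the IEEE-754 sign is bit 63, so fneg XORs it, fnabs ORs it in, and fabs clears it. A sketch of that pattern; the OP_63_* case labels are assumed names for the opcode constants:

        case OP_63_FNEG:
                VCPU_FPR(vcpu, ax_rd) = VCPU_FPR(vcpu, ax_rb);
                VCPU_FPR(vcpu, ax_rd) ^= 0x8000000000000000ULL;         /* flip sign bit */
                break;
        case OP_63_FABS:
                VCPU_FPR(vcpu, ax_rd) = VCPU_FPR(vcpu, ax_rb);
                VCPU_FPR(vcpu, ax_rd) &= ~0x8000000000000000ULL;        /* clear sign bit */
                break;

The remaining matches (671-673, 1251) are debug dumps that narrow each FPR with kvm_cvt_df() so ps0 can be printed alongside the corresponding qpr[] value.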