VCPU_VSX_VR       902 arch/powerpc/kvm/powerpc.c 		val.vval = VCPU_VSX_VR(vcpu, index - 32);
VCPU_VSX_VR       904 arch/powerpc/kvm/powerpc.c 		VCPU_VSX_VR(vcpu, index - 32) = val.vval;
VCPU_VSX_VR       917 arch/powerpc/kvm/powerpc.c 		val.vval = VCPU_VSX_VR(vcpu, index - 32);
VCPU_VSX_VR       920 arch/powerpc/kvm/powerpc.c 		VCPU_VSX_VR(vcpu, index - 32) = val.vval;
VCPU_VSX_VR       938 arch/powerpc/kvm/powerpc.c 		VCPU_VSX_VR(vcpu, index - 32) = val.vval;
VCPU_VSX_VR       959 arch/powerpc/kvm/powerpc.c 		val.vval = VCPU_VSX_VR(vcpu, index - 32);
VCPU_VSX_VR       961 arch/powerpc/kvm/powerpc.c 		VCPU_VSX_VR(vcpu, index - 32) = val.vval;
VCPU_VSX_VR      1026 arch/powerpc/kvm/powerpc.c 	val.vval = VCPU_VSX_VR(vcpu, index);
VCPU_VSX_VR      1028 arch/powerpc/kvm/powerpc.c 	VCPU_VSX_VR(vcpu, index) = val.vval;
VCPU_VSX_VR      1042 arch/powerpc/kvm/powerpc.c 	val.vval = VCPU_VSX_VR(vcpu, index);
VCPU_VSX_VR      1044 arch/powerpc/kvm/powerpc.c 	VCPU_VSX_VR(vcpu, index) = val.vval;
VCPU_VSX_VR      1058 arch/powerpc/kvm/powerpc.c 	val.vval = VCPU_VSX_VR(vcpu, index);
VCPU_VSX_VR      1060 arch/powerpc/kvm/powerpc.c 	VCPU_VSX_VR(vcpu, index) = val.vval;
VCPU_VSX_VR      1074 arch/powerpc/kvm/powerpc.c 	val.vval = VCPU_VSX_VR(vcpu, index);
VCPU_VSX_VR      1076 arch/powerpc/kvm/powerpc.c 	VCPU_VSX_VR(vcpu, index) = val.vval;
VCPU_VSX_VR      1393 arch/powerpc/kvm/powerpc.c 			reg.vval = VCPU_VSX_VR(vcpu, rs - 32);
VCPU_VSX_VR      1413 arch/powerpc/kvm/powerpc.c 			reg.vval = VCPU_VSX_VR(vcpu, rs - 32);
VCPU_VSX_VR      1528 arch/powerpc/kvm/powerpc.c 	reg.vval = VCPU_VSX_VR(vcpu, index);
VCPU_VSX_VR      1546 arch/powerpc/kvm/powerpc.c 	reg.vval = VCPU_VSX_VR(vcpu, index);
VCPU_VSX_VR      1564 arch/powerpc/kvm/powerpc.c 	reg.vval = VCPU_VSX_VR(vcpu, index);
VCPU_VSX_VR      1582 arch/powerpc/kvm/powerpc.c 	reg.vval = VCPU_VSX_VR(vcpu, index);
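All of the hits above follow the same pattern: the MMIO-emulation helpers in powerpc.c read or write one of the guest's 32 VMX/Altivec vector registers through VCPU_VSX_VR(). Some of those helpers are handed a VSX register number, and since VSX registers 32-63 overlay the VR file (0-31 overlay the FPRs), they subtract 32 before indexing, which is the "index - 32" / "rs - 32" seen in the earlier hits; the later helpers already receive a VMX register number and index VCPU_VSX_VR() directly. The standalone sketch below illustrates that access pattern only. It is not kernel code: struct vcpu_demo, demo_vr128 and the demo_* helpers are hypothetical stand-ins for struct kvm_vcpu and the kernel macro, which in the headers resolves (roughly) to the vcpu's vector register array.

/*
 * Minimal sketch of the VCPU_VSX_VR() access pattern, assuming a toy
 * vCPU with only a 32-entry vector register file.  All names here are
 * illustrative, not the kernel's.
 */
#include <stdint.h>
#include <string.h>
#include <stdio.h>

/* 128-bit vector value, standing in for the kernel's vector128 type. */
typedef struct {
	uint64_t u[2];
} demo_vr128;

/* Toy vCPU state: just the 32 VMX/Altivec vector registers. */
struct vcpu_demo {
	demo_vr128 vr[32];
};

/* Mirrors the macro's role: select VR slot i out of the vCPU state. */
#define VCPU_VSX_VR(vcpu, i)	((vcpu)->vr[i])

/*
 * Helper handed a VSX register index: VSX 32-63 overlay VR 0-31, so the
 * index is rebased by 32 before touching the VR file -- the "index - 32"
 * form in the hits above.  VSX 0-31 would go to the FPRs (not modelled).
 */
static void demo_set_vsx_vr(struct vcpu_demo *vcpu, int index, demo_vr128 val)
{
	if (index >= 32)
		VCPU_VSX_VR(vcpu, index - 32) = val;
}

static demo_vr128 demo_get_vsx_vr(struct vcpu_demo *vcpu, int index)
{
	demo_vr128 val = { { 0, 0 } };

	if (index >= 32)
		val = VCPU_VSX_VR(vcpu, index - 32);
	return val;
}

int main(void)
{
	struct vcpu_demo vcpu;
	demo_vr128 v = { { 0x1122334455667788ULL, 0x99aabbccddeeff00ULL } };
	demo_vr128 out;

	memset(&vcpu, 0, sizeof(vcpu));
	demo_set_vsx_vr(&vcpu, 34, v);		/* VSX register 34 lands in VR 2 */
	out = demo_get_vsx_vr(&vcpu, 34);

	printf("VR2 = %016llx %016llx\n",
	       (unsigned long long)out.u[0], (unsigned long long)out.u[1]);
	return 0;
}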