cr0                11 arch/arm/include/asm/hardware/ssp.h 	unsigned int	cr0;
cr0                18 arch/arm/include/asm/kvm_hyp.h 	"mrc", "mcr", __stringify(p10, 7, %0, CRn, cr0, 0), u32
cr0                12 arch/arm/include/asm/vfp.h #define FPSID			cr0
cr0                13 arch/arm/include/asm/vfpmacros.h 	MRC\cond	p10, 7, \rd, \sysreg, cr0, 0	@ FMRX	\rd, \sysreg
cr0                17 arch/arm/include/asm/vfpmacros.h 	MCR\cond	p10, 7, \rd, \sysreg, cr0, 0	@ FMXR	\sysreg, \rd
cr0                23 arch/arm/include/asm/vfpmacros.h 	LDC	p11, cr0, [\base],#33*4		    @ FLDMIAX \base!, {d0-d15}
cr0                25 arch/arm/include/asm/vfpmacros.h 	LDC	p11, cr0, [\base],#32*4		    @ FLDMIAD \base!, {d0-d15}
cr0                32 arch/arm/include/asm/vfpmacros.h 	ldclne	p11, cr0, [\base],#32*4		    @ FLDMIAD \base!, {d16-d31}
cr0                38 arch/arm/include/asm/vfpmacros.h 	ldcleq	p11, cr0, [\base],#32*4		    @ FLDMIAD \base!, {d16-d31}
cr0                47 arch/arm/include/asm/vfpmacros.h 	STC	p11, cr0, [\base],#33*4		    @ FSTMIAX \base!, {d0-d15}
cr0                49 arch/arm/include/asm/vfpmacros.h 	STC	p11, cr0, [\base],#32*4		    @ FSTMIAD \base!, {d0-d15}
cr0                56 arch/arm/include/asm/vfpmacros.h 	stclne	p11, cr0, [\base],#32*4		    @ FSTMIAD \base!, {d16-d31}
cr0                62 arch/arm/include/asm/vfpmacros.h 	stcleq	p11, cr0, [\base],#32*4		    @ FSTMIAD \base!, {d16-d31}
cr0               160 arch/arm/mach-sa1100/ssp.c 	ssp->cr0 = Ser4SSCR0;
cr0               176 arch/arm/mach-sa1100/ssp.c 	Ser4SSCR0 = ssp->cr0 & ~SSCR0_SSE;
cr0               178 arch/arm/mach-sa1100/ssp.c 	Ser4SSCR0 = ssp->cr0;
cr0               117 arch/parisc/include/asm/asmregs.h rctr:	.reg	%cr0
cr0               144 arch/parisc/include/asm/asmregs.h cr0:	.reg	%cr0
cr0                50 arch/parisc/include/asm/kgdb.h 	unsigned long cr0;
cr0                61 arch/parisc/include/uapi/asm/ptrace.h 	unsigned long cr0;
cr0               470 arch/parisc/kernel/ptrace.c 	case RI(cr0):			return mfctl(0);
cr0               523 arch/parisc/kernel/ptrace.c         case cr0, cr24, cr25, cr26, cr27, cr28, cr29, cr30, cr31;
cr0               334 arch/powerpc/boot/4xx.c 	u32 cr0 = mfdcr(DCRN_CPC0_CR0);
cr0               364 arch/powerpc/boot/4xx.c 	if (cr0 & CPC0_CR0_U0EC)
cr0               369 arch/powerpc/boot/4xx.c 		uart0 = plb / CPC0_CR0_UDIV(cr0);
cr0               371 arch/powerpc/boot/4xx.c 	if (cr0 & CPC0_CR0_U1EC)
cr0               376 arch/powerpc/boot/4xx.c 		uart1 = plb / CPC0_CR0_UDIV(cr0);
cr0               631 arch/s390/include/asm/kvm_host.h 	unsigned long cr0;
cr0               152 arch/s390/kernel/early.c 	unsigned long cr0, cr0_new;
cr0               160 arch/s390/kernel/early.c 	__ctl_store(cr0, 0, 0);
cr0               161 arch/s390/kernel/early.c 	cr0_new = cr0 & ~(1UL << 28);
cr0               164 arch/s390/kernel/early.c 	__ctl_load(cr0, 0, 0);
cr0                78 arch/s390/kernel/nmi.c 	unsigned long origin, cr0, size;
cr0                94 arch/s390/kernel/nmi.c 	__ctl_store(cr0, 0, 0);
cr0                98 arch/s390/kernel/nmi.c 	__ctl_load(cr0, 0, 0);
cr0               132 arch/s390/kvm/guestdbg.c 	vcpu->arch.guestdbg.cr0 = vcpu->arch.sie_block->gcr[0];
cr0               140 arch/s390/kvm/guestdbg.c 	vcpu->arch.sie_block->gcr[0] = vcpu->arch.guestdbg.cr0;
cr0              1091 arch/s390/kvm/vsie.c 	union ctlreg0 cr0;
cr0              1096 arch/s390/kvm/vsie.c 	cr0.val = vcpu->arch.sie_block->gcr[0];
cr0              1097 arch/s390/kvm/vsie.c 	edat = cr0.edat && test_kvm_facility(vcpu->kvm, 8);
cr0                36 arch/s390/lib/delay.c 	unsigned long cr0, cr0_new, psw_mask;
cr0                41 arch/s390/lib/delay.c 	__ctl_store(cr0, 0, 0);
cr0                42 arch/s390/lib/delay.c 	cr0_new = cr0 & ~CR0_IRQ_SUBCLASS_MASK;
cr0                51 arch/s390/lib/delay.c 	__ctl_load(cr0, 0, 0);
cr0               145 arch/s390/mm/maccess.c 	unsigned long cr0, flags, prefix;
cr0               148 arch/s390/mm/maccess.c 	__ctl_store(cr0, 0, 0);
cr0               160 arch/s390/mm/maccess.c 	__ctl_load(cr0, 0, 0);
cr0                22 arch/sh/include/cpu-sh5/cpu/registers.h #define SR	cr0
cr0                18 arch/x86/boot/cpuflags.c 	unsigned long cr0;
cr0                20 arch/x86/boot/cpuflags.c 	asm volatile("mov %%cr0,%0" : "=r" (cr0));
cr0                21 arch/x86/boot/cpuflags.c 	if (cr0 & (X86_CR0_EM|X86_CR0_TS)) {
cr0                22 arch/x86/boot/cpuflags.c 		cr0 &= ~(X86_CR0_EM|X86_CR0_TS);
cr0                23 arch/x86/boot/cpuflags.c 		asm volatile("mov %0,%%cr0" : : "r" (cr0));
cr0               546 arch/x86/include/asm/kvm_host.h 	unsigned long cr0;
cr0              1047 arch/x86/include/asm/kvm_host.h 	void (*set_cr0)(struct kvm_vcpu *vcpu, unsigned long cr0);
cr0              1382 arch/x86/include/asm/kvm_host.h int kvm_set_cr0(struct kvm_vcpu *vcpu, unsigned long cr0);
cr0                22 arch/x86/include/asm/suspend_32.h 	unsigned long cr0, cr2, cr3, cr4;
cr0                37 arch/x86/include/asm/suspend_64.h 	unsigned long cr0, cr2, cr3, cr4;
cr0               179 arch/x86/include/asm/svm.h 	u64 cr0;
cr0               154 arch/x86/include/uapi/asm/kvm.h 	__u64 cr0, cr2, cr3, cr4, cr8;
cr0                76 arch/x86/kernel/asm-offsets_64.c 	ENTRY(cr0);
cr0               139 arch/x86/kernel/cpu/mtrr/cyrix.c 	u32 cr0;
cr0               151 arch/x86/kernel/cpu/mtrr/cyrix.c 	cr0 = read_cr0() | X86_CR0_CD;
cr0               153 arch/x86/kernel/cpu/mtrr/cyrix.c 	write_cr0(cr0);
cr0               732 arch/x86/kernel/cpu/mtrr/generic.c 	unsigned long cr0;
cr0               744 arch/x86/kernel/cpu/mtrr/generic.c 	cr0 = read_cr0() | X86_CR0_CD;
cr0               745 arch/x86/kernel/cpu/mtrr/generic.c 	write_cr0(cr0);
cr0                19 arch/x86/kernel/fpu/init.c 	unsigned long cr0;
cr0                29 arch/x86/kernel/fpu/init.c 	cr0 = read_cr0();
cr0                30 arch/x86/kernel/fpu/init.c 	cr0 &= ~(X86_CR0_TS|X86_CR0_EM); /* clear TS and EM */
cr0                32 arch/x86/kernel/fpu/init.c 		cr0 |= X86_CR0_EM;
cr0                33 arch/x86/kernel/fpu/init.c 	write_cr0(cr0);
cr0                55 arch/x86/kernel/fpu/init.c 	unsigned long cr0;
cr0                60 arch/x86/kernel/fpu/init.c 	cr0 = read_cr0();
cr0                61 arch/x86/kernel/fpu/init.c 	cr0 &= ~(X86_CR0_TS | X86_CR0_EM);
cr0                62 arch/x86/kernel/fpu/init.c 	write_cr0(cr0);
cr0                63 arch/x86/kernel/process_32.c 	unsigned long cr0 = 0L, cr2 = 0L, cr3 = 0L, cr4 = 0L;
cr0                84 arch/x86/kernel/process_32.c 	cr0 = read_cr0();
cr0                89 arch/x86/kernel/process_32.c 			cr0, cr2, cr3, cr4);
cr0                69 arch/x86/kernel/process_64.c 	unsigned long cr0 = 0L, cr2 = 0L, cr3 = 0L, cr4 = 0L, fs, gs, shadowgs;
cr0               112 arch/x86/kernel/process_64.c 	cr0 = read_cr0();
cr0               120 arch/x86/kernel/process_64.c 			es, cr0);
cr0               880 arch/x86/kernel/traps.c 	unsigned long cr0 = read_cr0();
cr0               885 arch/x86/kernel/traps.c 	if (!boot_cpu_has(X86_FEATURE_FPU) && (cr0 & X86_CR0_EM)) {
cr0               897 arch/x86/kernel/traps.c 	if (WARN(cr0 & X86_CR0_TS, "CR0.TS was set")) {
cr0               899 arch/x86/kernel/traps.c 		write_cr0(cr0 & ~X86_CR0_TS);
cr0              2447 arch/x86/kvm/emulate.c 				    u64 cr0, u64 cr3, u64 cr4)
cr0              2472 arch/x86/kvm/emulate.c 	bad = ctxt->ops->set_cr(ctxt, 0, cr0);
cr0              2497 arch/x86/kvm/emulate.c 	u32 val, cr0, cr3, cr4;
cr0              2500 arch/x86/kvm/emulate.c 	cr0 =                      GET_SMSTATE(u32, smstate, 0x7ffc);
cr0              2543 arch/x86/kvm/emulate.c 	return rsm_enter_protected_mode(ctxt, cr0, cr3, cr4);
cr0              2552 arch/x86/kvm/emulate.c 	u64 val, cr0, cr3, cr4;
cr0              2568 arch/x86/kvm/emulate.c 	cr0 =                       GET_SMSTATE(u64, smstate, 0x7f58);
cr0              2597 arch/x86/kvm/emulate.c 	r = rsm_enter_protected_mode(ctxt, cr0, cr3, cr4);
cr0              2613 arch/x86/kvm/emulate.c 	unsigned long cr0, cr4, efer;
cr0              2654 arch/x86/kvm/emulate.c 	cr0 = ctxt->ops->get_cr(ctxt, 0);
cr0              2655 arch/x86/kvm/emulate.c 	if (cr0 & X86_CR0_PE)
cr0              2656 arch/x86/kvm/emulate.c 		ctxt->ops->set_cr(ctxt, 0, cr0 & ~(X86_CR0_PG | X86_CR0_PE));
cr0              3823 arch/x86/kvm/emulate.c 	ulong cr0;
cr0              3825 arch/x86/kvm/emulate.c 	cr0 = ctxt->ops->get_cr(ctxt, 0);
cr0              3826 arch/x86/kvm/emulate.c 	cr0 &= ~X86_CR0_TS;
cr0              3827 arch/x86/kvm/emulate.c 	ctxt->ops->set_cr(ctxt, 0, cr0);
cr0                94 arch/x86/kvm/kvm_cache_regs.h 	return vcpu->arch.cr0 & mask;
cr0               386 arch/x86/kvm/svm.c static void svm_set_cr0(struct kvm_vcpu *vcpu, unsigned long cr0);
cr0              2582 arch/x86/kvm/svm.c 	ulong gcr0 = svm->vcpu.arch.cr0;
cr0              2583 arch/x86/kvm/svm.c 	u64 *hcr0 = &svm->vmcb->save.cr0;
cr0              2599 arch/x86/kvm/svm.c static void svm_set_cr0(struct kvm_vcpu *vcpu, unsigned long cr0)
cr0              2605 arch/x86/kvm/svm.c 		if (!is_paging(vcpu) && (cr0 & X86_CR0_PG)) {
cr0              2610 arch/x86/kvm/svm.c 		if (is_paging(vcpu) && !(cr0 & X86_CR0_PG)) {
cr0              2616 arch/x86/kvm/svm.c 	vcpu->arch.cr0 = cr0;
cr0              2619 arch/x86/kvm/svm.c 		cr0 |= X86_CR0_PG | X86_CR0_WP;
cr0              2627 arch/x86/kvm/svm.c 		cr0 &= ~(X86_CR0_CD | X86_CR0_NW);
cr0              2628 arch/x86/kvm/svm.c 	svm->vmcb->save.cr0 = cr0;
cr0              3388 arch/x86/kvm/svm.c 	nested_vmcb->save.cr0    = kvm_read_cr0(&svm->vcpu);
cr0              3459 arch/x86/kvm/svm.c 	svm_set_cr0(&svm->vcpu, hsave->save.cr0 | X86_CR0_PE);
cr0              3563 arch/x86/kvm/svm.c 	svm_set_cr0(&svm->vcpu, nested_vmcb->save.cr0);
cr0              3704 arch/x86/kvm/svm.c 	hsave->save.cr0    = kvm_read_cr0(&svm->vcpu);
cr0              4006 arch/x86/kvm/svm.c 	unsigned long cr0 = svm->vcpu.arch.cr0;
cr0              4016 arch/x86/kvm/svm.c 	cr0 &= ~SVM_CR0_SELECTIVE_MASK;
cr0              4019 arch/x86/kvm/svm.c 	if (cr0 ^ val) {
cr0              4940 arch/x86/kvm/svm.c 	       "cr0:", save->cr0, "cr2:", save->cr2);
cr0              4985 arch/x86/kvm/svm.c 		vcpu->arch.cr0 = svm->vmcb->save.cr0;
cr0              6132 arch/x86/kvm/svm.c 		unsigned long cr0, val;
cr0              6147 arch/x86/kvm/svm.c 		cr0 = vcpu->arch.cr0 & ~SVM_CR0_SELECTIVE_MASK;
cr0              6151 arch/x86/kvm/svm.c 			cr0 &= 0xfUL;
cr0              6154 arch/x86/kvm/svm.c 			if (cr0 & X86_CR0_PE)
cr0              6158 arch/x86/kvm/svm.c 		if (cr0 ^ val)
cr0              2830 arch/x86/kvm/vmx/vmx.c 	vcpu->arch.cr0 &= ~cr0_guest_owned_bits;
cr0              2831 arch/x86/kvm/vmx/vmx.c 	vcpu->arch.cr0 |= vmcs_readl(GUEST_CR0) & cr0_guest_owned_bits;
cr0              2883 arch/x86/kvm/vmx/vmx.c 					unsigned long cr0,
cr0              2890 arch/x86/kvm/vmx/vmx.c 	if (!(cr0 & X86_CR0_PG)) {
cr0              2894 arch/x86/kvm/vmx/vmx.c 		vcpu->arch.cr0 = cr0;
cr0              2900 arch/x86/kvm/vmx/vmx.c 		vcpu->arch.cr0 = cr0;
cr0              2904 arch/x86/kvm/vmx/vmx.c 	if (!(cr0 & X86_CR0_WP))
cr0              2908 arch/x86/kvm/vmx/vmx.c void vmx_set_cr0(struct kvm_vcpu *vcpu, unsigned long cr0)
cr0              2913 arch/x86/kvm/vmx/vmx.c 	hw_cr0 = (cr0 & ~KVM_VM_CR0_ALWAYS_OFF);
cr0              2919 arch/x86/kvm/vmx/vmx.c 		if (vmx->rmode.vm86_active && (cr0 & X86_CR0_PE))
cr0              2922 arch/x86/kvm/vmx/vmx.c 		if (!vmx->rmode.vm86_active && !(cr0 & X86_CR0_PE))
cr0              2928 arch/x86/kvm/vmx/vmx.c 		if (!is_paging(vcpu) && (cr0 & X86_CR0_PG))
cr0              2930 arch/x86/kvm/vmx/vmx.c 		if (is_paging(vcpu) && !(cr0 & X86_CR0_PG))
cr0              2936 arch/x86/kvm/vmx/vmx.c 		ept_update_paging_mode_cr0(&hw_cr0, cr0, vcpu);
cr0              2938 arch/x86/kvm/vmx/vmx.c 	vmcs_writel(CR0_READ_SHADOW, cr0);
cr0              2940 arch/x86/kvm/vmx/vmx.c 	vcpu->arch.cr0 = cr0;
cr0              3861 arch/x86/kvm/vmx/vmx.c 	unsigned long cr0, cr3, cr4;
cr0              3863 arch/x86/kvm/vmx/vmx.c 	cr0 = read_cr0();
cr0              3864 arch/x86/kvm/vmx/vmx.c 	WARN_ON(cr0 & X86_CR0_TS);
cr0              3865 arch/x86/kvm/vmx/vmx.c 	vmcs_writel(HOST_CR0, cr0);  /* 22.2.3 */
cr0              4258 arch/x86/kvm/vmx/vmx.c 	u64 cr0;
cr0              4340 arch/x86/kvm/vmx/vmx.c 	cr0 = X86_CR0_NW | X86_CR0_CD | X86_CR0_ET;
cr0              4341 arch/x86/kvm/vmx/vmx.c 	vmx->vcpu.arch.cr0 = cr0;
cr0              4342 arch/x86/kvm/vmx/vmx.c 	vmx_set_cr0(vcpu, cr0); /* enter rmode */
cr0               322 arch/x86/kvm/vmx/vmx.h void vmx_set_cr0(struct kvm_vcpu *vcpu, unsigned long cr0);
cr0               761 arch/x86/kvm/x86.c int kvm_set_cr0(struct kvm_vcpu *vcpu, unsigned long cr0)
cr0               766 arch/x86/kvm/x86.c 	cr0 |= X86_CR0_ET;
cr0               769 arch/x86/kvm/x86.c 	if (cr0 & 0xffffffff00000000UL)
cr0               773 arch/x86/kvm/x86.c 	cr0 &= ~CR0_RESERVED_BITS;
cr0               775 arch/x86/kvm/x86.c 	if ((cr0 & X86_CR0_NW) && !(cr0 & X86_CR0_CD))
cr0               778 arch/x86/kvm/x86.c 	if ((cr0 & X86_CR0_PG) && !(cr0 & X86_CR0_PE))
cr0               781 arch/x86/kvm/x86.c 	if (!is_paging(vcpu) && (cr0 & X86_CR0_PG)) {
cr0               798 arch/x86/kvm/x86.c 	if (!(cr0 & X86_CR0_PG) && kvm_read_cr4_bits(vcpu, X86_CR4_PCIDE))
cr0               801 arch/x86/kvm/x86.c 	kvm_x86_ops->set_cr0(vcpu, cr0);
cr0               803 arch/x86/kvm/x86.c 	if ((cr0 ^ old_cr0) & X86_CR0_PG) {
cr0               808 arch/x86/kvm/x86.c 	if ((cr0 ^ old_cr0) & update_bits)
cr0               811 arch/x86/kvm/x86.c 	if (((cr0 ^ old_cr0) & X86_CR0_CD) &&
cr0              7861 arch/x86/kvm/x86.c 	u32 cr0;
cr0              7890 arch/x86/kvm/x86.c 	cr0 = vcpu->arch.cr0 & ~(X86_CR0_PE | X86_CR0_EM | X86_CR0_TS | X86_CR0_PG);
cr0              7891 arch/x86/kvm/x86.c 	kvm_x86_ops->set_cr0(vcpu, cr0);
cr0              7892 arch/x86/kvm/x86.c 	vcpu->arch.cr0 = cr0;
cr0              8743 arch/x86/kvm/x86.c 	sregs->cr0 = kvm_read_cr0(vcpu);
cr0              8843 arch/x86/kvm/x86.c 	if ((sregs->efer & EFER_LME) && (sregs->cr0 & X86_CR0_PG)) {
cr0              8898 arch/x86/kvm/x86.c 	mmu_reset_needed |= kvm_read_cr0(vcpu) != sregs->cr0;
cr0              8899 arch/x86/kvm/x86.c 	kvm_x86_ops->set_cr0(vcpu, sregs->cr0);
cr0              8900 arch/x86/kvm/x86.c 	vcpu->arch.cr0 = sregs->cr0;
cr0              9136 arch/x86/kvm/x86.c 	vcpu->arch.cr0 |= X86_CR0_ET;
cr0               122 arch/x86/power/cpu.c 	ctxt->cr0 = read_cr0();
cr0               212 arch/x86/power/cpu.c 	write_cr0(ctxt->cr0);
cr0               857 arch/x86/xen/enlighten_pv.c 	unsigned long cr0 = this_cpu_read(xen_cr0_value);
cr0               859 arch/x86/xen/enlighten_pv.c 	if (unlikely(cr0 == 0)) {
cr0               860 arch/x86/xen/enlighten_pv.c 		cr0 = native_read_cr0();
cr0               861 arch/x86/xen/enlighten_pv.c 		this_cpu_write(xen_cr0_value, cr0);
cr0               864 arch/x86/xen/enlighten_pv.c 	return cr0;
cr0               867 arch/x86/xen/enlighten_pv.c static void xen_write_cr0(unsigned long cr0)
cr0               871 arch/x86/xen/enlighten_pv.c 	this_cpu_write(xen_cr0_value, cr0);
cr0               877 arch/x86/xen/enlighten_pv.c 	MULTI_fpu_taskswitch(mcs.mc, (cr0 & X86_CR0_TS) != 0);
cr0               105 drivers/cpufreq/powernow-k6.c 	unsigned long cr0;
cr0               114 drivers/cpufreq/powernow-k6.c 	cr0 = read_cr0();
cr0               115 drivers/cpufreq/powernow-k6.c 	write_cr0(cr0 | X86_CR0_CD);
cr0               129 drivers/cpufreq/powernow-k6.c 	write_cr0(cr0);
cr0                79 drivers/crypto/ccp/ccp-dev-v3.c 	u32 cr0, cmd;
cr0                89 drivers/crypto/ccp/ccp-dev-v3.c 	cr0 = (cmd_q->id << REQ0_CMD_Q_SHIFT)
cr0                94 drivers/crypto/ccp/ccp-dev-v3.c 		cr0 |= REQ0_STOP_ON_COMPLETE
cr0                98 drivers/crypto/ccp/ccp-dev-v3.c 		cr0 |= REQ0_INT_ON_COMPLETE;
cr0               111 drivers/crypto/ccp/ccp-dev-v3.c 	iowrite32(cr0, ccp->io_regs + CMD_REQ0);
cr0               115 drivers/crypto/ccp/ccp-dev-v3.c 	if (cr0 & REQ0_INT_ON_COMPLETE) {
cr0               554 drivers/gpu/drm/mcde/mcde_display.c 	u32 cr0, cr1;
cr0               559 drivers/gpu/drm/mcde/mcde_display.c 		cr0 = MCDE_CRA0;
cr0               564 drivers/gpu/drm/mcde/mcde_display.c 		cr0 = MCDE_CRB0;
cr0               581 drivers/gpu/drm/mcde/mcde_display.c 	writel(val, mcde->regs + cr0);
cr0                71 drivers/input/touchscreen/mc13783_ts.c 	int cr0, cr1;
cr0                83 drivers/input/touchscreen/mc13783_ts.c 	cr0 = (priv->sample[2] >> 12) & 0xfff;
cr0                88 drivers/input/touchscreen/mc13783_ts.c 		x0, x1, x2, y0, y1, y2, cr0, cr1);
cr0                93 drivers/input/touchscreen/mc13783_ts.c 	cr0 = (cr0 + cr1) / 2;
cr0                95 drivers/input/touchscreen/mc13783_ts.c 	if (!cr0 || !sample_tolerance ||
cr0                99 drivers/input/touchscreen/mc13783_ts.c 		if (cr0) {
cr0               104 drivers/input/touchscreen/mc13783_ts.c 					x1, y1, 0x1000 - cr0);
cr0               111 drivers/input/touchscreen/mc13783_ts.c 				cr0 ? 0x1000 - cr0 : cr0);
cr0               112 drivers/input/touchscreen/mc13783_ts.c 		input_report_key(idev, BTN_TOUCH, cr0);
cr0               537 drivers/s390/char/sclp.c 	unsigned long cr0, cr0_sync;
cr0               558 drivers/s390/char/sclp.c 	__ctl_store(cr0, 0, 0);
cr0               559 drivers/s390/char/sclp.c 	cr0_sync = cr0 & ~CR0_IRQ_SUBCLASS_MASK;
cr0               573 drivers/s390/char/sclp.c 	__ctl_load(cr0, 0, 0);
cr0                31 drivers/s390/char/sclp_early_core.c 	union ctlreg0 cr0, cr0_new;
cr0                33 drivers/s390/char/sclp_early_core.c 	__ctl_store(cr0.val, 0, 0);
cr0                34 drivers/s390/char/sclp_early_core.c 	cr0_new.val = cr0.val & ~CR0_IRQ_SUBCLASS_MASK;
cr0                60 drivers/s390/char/sclp_early_core.c 	__ctl_load(cr0.val, 0, 0);
cr0               296 drivers/spi/spi-dw.c 	u32 cr0;
cr0               328 drivers/spi/spi-dw.c 	cr0 = (transfer->bits_per_word - 1)
cr0               347 drivers/spi/spi-dw.c 		cr0 &= ~SPI_TMOD_MASK;
cr0               348 drivers/spi/spi-dw.c 		cr0 |= (chip->tmode << SPI_TMOD_OFFSET);
cr0               351 drivers/spi/spi-dw.c 	dw_writel(dws, DW_SPI_CTRL0, cr0);
cr0               153 drivers/spi/spi-ep93xx.c 	u16 cr0;
cr0               161 drivers/spi/spi-ep93xx.c 	cr0 = div_scr << SSPCR0_SCR_SHIFT;
cr0               162 drivers/spi/spi-ep93xx.c 	cr0 |= (spi->mode & (SPI_CPHA | SPI_CPOL)) << SSPCR0_MODE_SHIFT;
cr0               163 drivers/spi/spi-ep93xx.c 	cr0 |= dss;
cr0               167 drivers/spi/spi-ep93xx.c 	dev_dbg(&master->dev, "setup: cr0 %#x\n", cr0);
cr0               170 drivers/spi/spi-ep93xx.c 	writel(cr0, espi->mmio + SSPCR0);
cr0               418 drivers/spi/spi-pl022.c 	u32 cr0;
cr0               563 drivers/spi/spi-pl022.c 		writel(chip->cr0, SSP_CR0(pl022->virtbase));
cr0               565 drivers/spi/spi-pl022.c 		writew(chip->cr0, SSP_CR0(pl022->virtbase));
cr0              1973 drivers/spi/spi-pl022.c 	chip->cr0 = 0;
cr0              2006 drivers/spi/spi-pl022.c 			SSP_WRITE_BITS(chip->cr0, chip_info->duplex,
cr0              2008 drivers/spi/spi-pl022.c 			SSP_WRITE_BITS(chip->cr0, chip_info->ctrl_len,
cr0              2010 drivers/spi/spi-pl022.c 			SSP_WRITE_BITS(chip->cr0, chip_info->iface,
cr0              2015 drivers/spi/spi-pl022.c 		SSP_WRITE_BITS(chip->cr0, bits - 1,
cr0              2032 drivers/spi/spi-pl022.c 		SSP_WRITE_BITS(chip->cr0, bits - 1,
cr0              2034 drivers/spi/spi-pl022.c 		SSP_WRITE_BITS(chip->cr0, chip_info->iface,
cr0              2043 drivers/spi/spi-pl022.c 	SSP_WRITE_BITS(chip->cr0, tmp, SSP_CR0_MASK_SPO, 6);
cr0              2049 drivers/spi/spi-pl022.c 	SSP_WRITE_BITS(chip->cr0, tmp, SSP_CR0_MASK_SPH, 7);
cr0              2051 drivers/spi/spi-pl022.c 	SSP_WRITE_BITS(chip->cr0, clk_freq.scr, SSP_CR0_MASK_SCR, 8);
cr0               953 drivers/spi/spi-pxa2xx.c 	u32 cr0;
cr0              1054 drivers/spi/spi-pxa2xx.c 	cr0 = pxa2xx_configure_sscr0(drv_data, clk_div, bits);
cr0              1058 drivers/spi/spi-pxa2xx.c 				/ (1 + ((cr0 & SSCR0_SCR(0xfff)) >> 8)),
cr0              1063 drivers/spi/spi-pxa2xx.c 				/ (1 + ((cr0 & SSCR0_SCR(0x0ff)) >> 8)),
cr0              1082 drivers/spi/spi-pxa2xx.c 	if ((pxa2xx_spi_read(drv_data, SSCR0) != cr0)
cr0              1086 drivers/spi/spi-pxa2xx.c 		pxa2xx_spi_write(drv_data, SSCR0, cr0 & ~SSCR0_SSE);
cr0              1092 drivers/spi/spi-pxa2xx.c 		pxa2xx_spi_write(drv_data, SSCR0, cr0);
cr0               462 drivers/spi/spi-rockchip.c 	u32 cr0 = CR0_FRF_SPI  << CR0_FRF_OFFSET
cr0               469 drivers/spi/spi-rockchip.c 	cr0 |= rs->rsd << CR0_RSD_OFFSET;
cr0               470 drivers/spi/spi-rockchip.c 	cr0 |= (spi->mode & 0x3U) << CR0_SCPH_OFFSET;
cr0               472 drivers/spi/spi-rockchip.c 		cr0 |= CR0_FBM_LSB << CR0_FBM_OFFSET;
cr0               475 drivers/spi/spi-rockchip.c 		cr0 |= CR0_XFM_TR << CR0_XFM_OFFSET;
cr0               477 drivers/spi/spi-rockchip.c 		cr0 |= CR0_XFM_RO << CR0_XFM_OFFSET;
cr0               479 drivers/spi/spi-rockchip.c 		cr0 |= CR0_XFM_TO << CR0_XFM_OFFSET;
cr0               483 drivers/spi/spi-rockchip.c 		cr0 |= CR0_DFS_4BIT << CR0_DFS_OFFSET;
cr0               487 drivers/spi/spi-rockchip.c 		cr0 |= CR0_DFS_8BIT << CR0_DFS_OFFSET;
cr0               491 drivers/spi/spi-rockchip.c 		cr0 |= CR0_DFS_16BIT << CR0_DFS_OFFSET;
cr0               509 drivers/spi/spi-rockchip.c 	writel_relaxed(cr0, rs->regs + ROCKCHIP_SPI_CTRLR0);
cr0               210 drivers/spi/spi-txx9.c 			u32 cr0;
cr0               217 drivers/spi/spi-txx9.c 			cr0 = txx9spi_rd(c, TXx9_SPCR0);
cr0               218 drivers/spi/spi-txx9.c 			cr0 &= ~TXx9_SPCR0_RXIFL_MASK;
cr0               219 drivers/spi/spi-txx9.c 			cr0 |= (count - 1) << 12;
cr0               221 drivers/spi/spi-txx9.c 			cr0 |= TXx9_SPCR0_RBSIE;
cr0               222 drivers/spi/spi-txx9.c 			txx9spi_wr(c, cr0, TXx9_SPCR0);
cr0               970 drivers/video/fbdev/sstfb.c 	u8 cr0, cc;
cr0               978 drivers/video/fbdev/sstfb.c 	cr0 = sst_dac_read(DACREG_RMR);	/* 5 CR0 */
cr0               985 drivers/video/fbdev/sstfb.c 	sst_dac_write(DACREG_RMR, (cr0 & 0xf0)
cr0              1016 drivers/video/fbdev/sstfb.c 	            cr0 & ~DACREG_CR0_PWDOWN & ~DACREG_CR0_EN_INDEXED);
cr0              1061 drivers/video/fbdev/sstfb.c 	u8 cr0;
cr0              1069 drivers/video/fbdev/sstfb.c 	cr0 = sst_dac_read(DACREG_RMR);
cr0              1079 drivers/video/fbdev/sstfb.c 		sst_dac_write(DACREG_RMR, (cr0 & 0x0f) | DACREG_CR0_16BPP);
cr0              2744 fs/cifs/cifspdu.h 	char cr0;         /* \n */
cr0                40 include/xen/interface/hvm/hvm_vcpu.h     uint32_t cr0;
cr0               103 include/xen/interface/hvm/hvm_vcpu.h     uint64_t cr0;
cr0                46 sound/soc/pxa/pxa-ssp.c 	uint32_t	cr0;
cr0               144 sound/soc/pxa/pxa-ssp.c 	priv->cr0 = __raw_readl(ssp->mmio_base + SSCR0);
cr0               163 sound/soc/pxa/pxa-ssp.c 	__raw_writel(priv->cr0 & ~SSCR0_SSE, ssp->mmio_base + SSCR0);
cr0               154 tools/arch/x86/include/uapi/asm/kvm.h 	__u64 cr0, cr2, cr3, cr4, cr8;
cr0               191 tools/testing/selftests/kvm/include/x86_64/processor.h 	uint64_t cr0;
cr0               194 tools/testing/selftests/kvm/include/x86_64/processor.h 			     : /* output */ [cr0]"=r"(cr0));
cr0               195 tools/testing/selftests/kvm/include/x86_64/processor.h 	return cr0;
cr0               216 tools/testing/selftests/kvm/lib/x86_64/processor.c 		sregs->cr0, sregs->cr2, sregs->cr3, sregs->cr4);
cr0               625 tools/testing/selftests/kvm/lib/x86_64/processor.c 		sregs.cr0 = X86_CR0_PE | X86_CR0_NE | X86_CR0_PG;
cr0               137 tools/testing/selftests/kvm/lib/x86_64/vmx.c 	unsigned long cr0;
cr0               145 tools/testing/selftests/kvm/lib/x86_64/vmx.c 	__asm__ __volatile__("mov %%cr0, %0" : "=r"(cr0) : : "memory");
cr0               146 tools/testing/selftests/kvm/lib/x86_64/vmx.c 	cr0 &= rdmsr(MSR_IA32_VMX_CR0_FIXED1);
cr0               147 tools/testing/selftests/kvm/lib/x86_64/vmx.c 	cr0 |= rdmsr(MSR_IA32_VMX_CR0_FIXED0);
cr0               148 tools/testing/selftests/kvm/lib/x86_64/vmx.c 	__asm__ __volatile__("mov %0, %%cr0" : : "r"(cr0) : "memory");
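
The x86 entries above (arch/x86/boot/cpuflags.c, arch/x86/kernel/fpu/init.c, arch/x86/kernel/cpu/mtrr/, drivers/cpufreq/powernow-k6.c) all repeat the same read-modify-write pattern on the CR0 control register. Below is a minimal sketch of that pattern, not kernel code: it assumes ring-0 execution on x86, the sketch_* helper names are hypothetical, and the X86_CR0_* bit positions follow the architectural CR0 layout (EM is bit 2, TS is bit 3).

	/*
	 * Hypothetical sketch of the CR0 read-modify-write pattern seen in the
	 * x86 entries above. Must run at CPL 0; the inline asm matches the
	 * accessors used in arch/x86/boot/cpuflags.c.
	 */
	#define X86_CR0_EM	(1UL << 2)	/* x87 emulation */
	#define X86_CR0_TS	(1UL << 3)	/* task switched */

	static inline unsigned long sketch_read_cr0(void)
	{
		unsigned long cr0;

		asm volatile("mov %%cr0,%0" : "=r" (cr0));
		return cr0;
	}

	static inline void sketch_write_cr0(unsigned long cr0)
	{
		asm volatile("mov %0,%%cr0" : : "r" (cr0));
	}

	static void sketch_clear_ts_em(void)
	{
		unsigned long cr0 = sketch_read_cr0();

		/* Clear TS and EM so FPU instructions execute natively. */
		cr0 &= ~(X86_CR0_TS | X86_CR0_EM);
		sketch_write_cr0(cr0);
	}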