xstate 108 arch/sh/include/asm/processor_32.h union thread_xstate *xstate;
xstate 110 arch/sh/include/asm/processor_64.h union thread_xstate *xstate;
xstate 22 arch/sh/kernel/cpu/fpu.c if (!tsk->thread.xstate) {
xstate 23 arch/sh/kernel/cpu/fpu.c tsk->thread.xstate = kmem_cache_alloc(task_xstate_cachep,
xstate 25 arch/sh/kernel/cpu/fpu.c if (!tsk->thread.xstate)
xstate 30 arch/sh/kernel/cpu/fpu.c struct sh_fpu_hard_struct *fp = &tsk->thread.xstate->hardfpu;
xstate 34 arch/sh/kernel/cpu/fpu.c struct sh_fpu_soft_struct *fp = &tsk->thread.xstate->softfpu;
xstate 52 arch/sh/kernel/cpu/sh2a/fpu.c : "0" ((char *)(&tsk->thread.xstate->hardfpu.status)),
xstate 84 arch/sh/kernel/cpu/sh2a/fpu.c : "0" (tsk->thread.xstate), "r" (FPSCR_RCHG)
xstate 456 arch/sh/kernel/cpu/sh2a/fpu.c if ((tsk->thread.xstate->hardfpu.fpscr & FPSCR_FPU_ERROR)) {
xstate 458 arch/sh/kernel/cpu/sh2a/fpu.c denormal_to_double (&tsk->thread.xstate->hardfpu,
xstate 473 arch/sh/kernel/cpu/sh2a/fpu.c hx = tsk->thread.xstate->hardfpu.fp_regs[n];
xstate 474 arch/sh/kernel/cpu/sh2a/fpu.c hy = tsk->thread.xstate->hardfpu.fp_regs[m];
xstate 475 arch/sh/kernel/cpu/sh2a/fpu.c fpscr = tsk->thread.xstate->hardfpu.fpscr;
xstate 485 arch/sh/kernel/cpu/sh2a/fpu.c | tsk->thread.xstate->hardfpu.fp_regs[n+1];
xstate 487 arch/sh/kernel/cpu/sh2a/fpu.c | tsk->thread.xstate->hardfpu.fp_regs[m+1];
xstate 492 arch/sh/kernel/cpu/sh2a/fpu.c tsk->thread.xstate->hardfpu.fp_regs[n] = llx >> 32;
xstate 493 arch/sh/kernel/cpu/sh2a/fpu.c tsk->thread.xstate->hardfpu.fp_regs[n+1] = llx & 0xffffffff;
xstate 502 arch/sh/kernel/cpu/sh2a/fpu.c tsk->thread.xstate->hardfpu.fp_regs[n] = hx;
xstate 516 arch/sh/kernel/cpu/sh2a/fpu.c hx = tsk->thread.xstate->hardfpu.fp_regs[n];
xstate 517 arch/sh/kernel/cpu/sh2a/fpu.c hy = tsk->thread.xstate->hardfpu.fp_regs[m];
xstate 518 arch/sh/kernel/cpu/sh2a/fpu.c fpscr = tsk->thread.xstate->hardfpu.fpscr;
xstate 528 arch/sh/kernel/cpu/sh2a/fpu.c | tsk->thread.xstate->hardfpu.fp_regs[n+1];
xstate 530 arch/sh/kernel/cpu/sh2a/fpu.c | tsk->thread.xstate->hardfpu.fp_regs[m+1];
xstate 535 arch/sh/kernel/cpu/sh2a/fpu.c tsk->thread.xstate->hardfpu.fp_regs[n] = llx >> 32;
xstate 536 arch/sh/kernel/cpu/sh2a/fpu.c tsk->thread.xstate->hardfpu.fp_regs[n+1] = llx & 0xffffffff;
xstate 545 arch/sh/kernel/cpu/sh2a/fpu.c tsk->thread.xstate->hardfpu.fp_regs[n] = hx;
xstate 563 arch/sh/kernel/cpu/sh2a/fpu.c tsk->thread.xstate->hardfpu.fpscr &=
xstate 84 arch/sh/kernel/cpu/sh4/fpu.c :"0"((char *)(&tsk->thread.xstate->hardfpu.status)),
xstate 134 arch/sh/kernel/cpu/sh4/fpu.c :"0" (tsk->thread.xstate), "r" (FPSCR_RCHG)
xstate 230 arch/sh/kernel/cpu/sh4/fpu.c if ((tsk->thread.xstate->hardfpu.fpscr & FPSCR_CAUSE_ERROR))
xstate 232 arch/sh/kernel/cpu/sh4/fpu.c denormal_to_double(&tsk->thread.xstate->hardfpu,
xstate 248 arch/sh/kernel/cpu/sh4/fpu.c hx = tsk->thread.xstate->hardfpu.fp_regs[n];
xstate 249 arch/sh/kernel/cpu/sh4/fpu.c hy = tsk->thread.xstate->hardfpu.fp_regs[m];
xstate 250 arch/sh/kernel/cpu/sh4/fpu.c fpscr = tsk->thread.xstate->hardfpu.fpscr;
xstate 260 arch/sh/kernel/cpu/sh4/fpu.c | tsk->thread.xstate->hardfpu.fp_regs[n + 1];
xstate 262 arch/sh/kernel/cpu/sh4/fpu.c | tsk->thread.xstate->hardfpu.fp_regs[m + 1];
xstate 264 arch/sh/kernel/cpu/sh4/fpu.c tsk->thread.xstate->hardfpu.fp_regs[n] = llx >> 32;
xstate 265 arch/sh/kernel/cpu/sh4/fpu.c tsk->thread.xstate->hardfpu.fp_regs[n + 1] = llx & 0xffffffff;
xstate 271 arch/sh/kernel/cpu/sh4/fpu.c tsk->thread.xstate->hardfpu.fp_regs[n] = hx;
xstate 286 arch/sh/kernel/cpu/sh4/fpu.c hx = tsk->thread.xstate->hardfpu.fp_regs[n];
xstate 287 arch/sh/kernel/cpu/sh4/fpu.c hy = tsk->thread.xstate->hardfpu.fp_regs[m];
xstate 288 arch/sh/kernel/cpu/sh4/fpu.c fpscr = tsk->thread.xstate->hardfpu.fpscr;
xstate 298 arch/sh/kernel/cpu/sh4/fpu.c | tsk->thread.xstate->hardfpu.fp_regs[n + 1];
xstate 300 arch/sh/kernel/cpu/sh4/fpu.c | tsk->thread.xstate->hardfpu.fp_regs[m + 1];
xstate 305 arch/sh/kernel/cpu/sh4/fpu.c tsk->thread.xstate->hardfpu.fp_regs[n] = llx >> 32;
xstate 306 arch/sh/kernel/cpu/sh4/fpu.c tsk->thread.xstate->hardfpu.fp_regs[n + 1] = llx & 0xffffffff;
xstate 315 arch/sh/kernel/cpu/sh4/fpu.c tsk->thread.xstate->hardfpu.fp_regs[n] = hx;
xstate 330 arch/sh/kernel/cpu/sh4/fpu.c hx = tsk->thread.xstate->hardfpu.fp_regs[n];
xstate 331 arch/sh/kernel/cpu/sh4/fpu.c hy = tsk->thread.xstate->hardfpu.fp_regs[m];
xstate 332 arch/sh/kernel/cpu/sh4/fpu.c fpscr = tsk->thread.xstate->hardfpu.fpscr;
xstate 342 arch/sh/kernel/cpu/sh4/fpu.c | tsk->thread.xstate->hardfpu.fp_regs[n + 1];
xstate 344 arch/sh/kernel/cpu/sh4/fpu.c | tsk->thread.xstate->hardfpu.fp_regs[m + 1];
xstate 348 arch/sh/kernel/cpu/sh4/fpu.c tsk->thread.xstate->hardfpu.fp_regs[n] = llx >> 32;
xstate 349 arch/sh/kernel/cpu/sh4/fpu.c tsk->thread.xstate->hardfpu.fp_regs[n + 1] = llx & 0xffffffff;
xstate 355 arch/sh/kernel/cpu/sh4/fpu.c tsk->thread.xstate->hardfpu.fp_regs[n] = hx;
xstate 368 arch/sh/kernel/cpu/sh4/fpu.c hx = tsk->thread.xstate->hardfpu.fp_regs[m];
xstate 370 arch/sh/kernel/cpu/sh4/fpu.c if ((tsk->thread.xstate->hardfpu.fpscr & FPSCR_CAUSE_ERROR)
xstate 375 arch/sh/kernel/cpu/sh4/fpu.c llx = ((long long)tsk->thread.xstate->hardfpu.fp_regs[m] << 32)
xstate 376 arch/sh/kernel/cpu/sh4/fpu.c | tsk->thread.xstate->hardfpu.fp_regs[m + 1];
xstate 378 arch/sh/kernel/cpu/sh4/fpu.c tsk->thread.xstate->hardfpu.fpul = float64_to_float32(llx);
xstate 397 arch/sh/kernel/cpu/sh4/fpu.c int roundingMode = FPSCR_ROUNDING_MODE(tsk->thread.xstate->hardfpu.fpscr);
xstate 409 arch/sh/kernel/cpu/sh4/fpu.c tsk->thread.xstate->hardfpu.fpscr &=
xstate 411 arch/sh/kernel/cpu/sh4/fpu.c tsk->thread.xstate->hardfpu.fpscr |= fpu_exception_flags;
xstate 414 arch/sh/kernel/cpu/sh4/fpu.c tsk->thread.xstate->hardfpu.fpscr |= (fpu_exception_flags >> 10);
xstate 418 arch/sh/kernel/cpu/sh4/fpu.c if ((((tsk->thread.xstate->hardfpu.fpscr & FPSCR_ENABLE_MASK) >> 7) &
xstate 54 arch/sh/kernel/cpu/sh5/fpu.c : "r" (&tsk->thread.xstate->hardfpu)
xstate 97 arch/sh/kernel/cpu/sh5/fpu.c : "r" (&tsk->thread.xstate->hardfpu)
xstate 31 arch/sh/kernel/process.c if (src->thread.xstate) {
xstate 32 arch/sh/kernel/process.c dst->thread.xstate = kmem_cache_alloc(task_xstate_cachep,
xstate 34 arch/sh/kernel/process.c if (!dst->thread.xstate)
xstate 36 arch/sh/kernel/process.c memcpy(dst->thread.xstate, src->thread.xstate, xstate_size);
xstate 44 arch/sh/kernel/process.c if (tsk->thread.xstate) {
xstate 45 arch/sh/kernel/process.c kmem_cache_free(task_xstate_cachep, tsk->thread.xstate);
xstate 46 arch/sh/kernel/process.c tsk->thread.xstate = NULL;
xstate 185 arch/sh/kernel/process_32.c prefetch(next_t->xstate);
xstate 360 arch/sh/kernel/process_64.c memcpy(fpu, &tsk->thread.xstate->hardfpu, sizeof(*fpu));
xstate 197 arch/sh/kernel/ptrace_32.c &target->thread.xstate->hardfpu, 0, -1);
xstate 200 arch/sh/kernel/ptrace_32.c &target->thread.xstate->softfpu, 0, -1);
xstate 218 arch/sh/kernel/ptrace_32.c &target->thread.xstate->hardfpu, 0, -1);
xstate 221 arch/sh/kernel/ptrace_32.c &target->thread.xstate->softfpu, 0, -1);
xstate 399 arch/sh/kernel/ptrace_32.c tmp = ((unsigned long *)child->thread.xstate)
xstate 434 arch/sh/kernel/ptrace_32.c ((unsigned long *)child->thread.xstate)
xstate 89 arch/sh/kernel/ptrace_64.c tmp = ((long *)task->thread.xstate)[addr / sizeof(unsigned long)];
xstate 124 arch/sh/kernel/ptrace_64.c ((long *)task->thread.xstate)[addr / sizeof(unsigned long)] = data;
xstate 226 arch/sh/kernel/ptrace_64.c &target->thread.xstate->hardfpu, 0, -1);
xstate 243 arch/sh/kernel/ptrace_64.c &target->thread.xstate->hardfpu, 0, -1);
xstate 86 arch/sh/kernel/signal_32.c return __copy_from_user(&tsk->thread.xstate->hardfpu, &sc->sc_fpregs[0],
xstate 110 arch/sh/kernel/signal_32.c return __copy_to_user(&sc->sc_fpregs[0], &tsk->thread.xstate->hardfpu,
xstate 157 arch/sh/kernel/signal_64.c err |= __copy_from_user(&current->thread.xstate->hardfpu, &sc->sc_fpregs[0],
xstate 182 arch/sh/kernel/signal_64.c err |= __copy_to_user(&sc->sc_fpregs[0], &current->thread.xstate->hardfpu,
xstate 353 arch/sh/kernel/traps_64.c current->thread.xstate->hardfpu.fp_regs[destreg] = buflo;
xstate 357 arch/sh/kernel/traps_64.c current->thread.xstate->hardfpu.fp_regs[destreg] = buflo;
xstate 358 arch/sh/kernel/traps_64.c current->thread.xstate->hardfpu.fp_regs[destreg+1] = bufhi;
xstate 361 arch/sh/kernel/traps_64.c current->thread.xstate->hardfpu.fp_regs[destreg] = bufhi;
xstate 362 arch/sh/kernel/traps_64.c current->thread.xstate->hardfpu.fp_regs[destreg+1] = buflo;
xstate 364 arch/sh/kernel/traps_64.c current->thread.xstate->hardfpu.fp_regs[destreg] = buflo;
xstate 365 arch/sh/kernel/traps_64.c current->thread.xstate->hardfpu.fp_regs[destreg+1] = bufhi;
xstate 420 arch/sh/kernel/traps_64.c buflo = current->thread.xstate->hardfpu.fp_regs[srcreg];
xstate 424 arch/sh/kernel/traps_64.c buflo = current->thread.xstate->hardfpu.fp_regs[srcreg];
xstate 425 arch/sh/kernel/traps_64.c bufhi = current->thread.xstate->hardfpu.fp_regs[srcreg+1];
xstate 428 arch/sh/kernel/traps_64.c bufhi = current->thread.xstate->hardfpu.fp_regs[srcreg];
xstate 429 arch/sh/kernel/traps_64.c buflo = current->thread.xstate->hardfpu.fp_regs[srcreg+1];
xstate 431 arch/sh/kernel/traps_64.c buflo = current->thread.xstate->hardfpu.fp_regs[srcreg];
xstate 432 arch/sh/kernel/traps_64.c bufhi = current->thread.xstate->hardfpu.fp_regs[srcreg+1];
xstate 554 arch/sh/math-emu/math.c if ((tsk->thread.xstate->softfpu.fpscr & (1 << 17))) {
xstate 556 arch/sh/math-emu/math.c denormal_to_double (&tsk->thread.xstate->softfpu,
xstate 558 arch/sh/math-emu/math.c tsk->thread.xstate->softfpu.fpscr &=
xstate 598 arch/sh/math-emu/math.c struct sh_fpu_soft_struct *fpu = &(tsk->thread.xstate->softfpu);
xstate 274 arch/x86/include/asm/fpu/internal.h static inline void copy_xregs_to_kernel_booting(struct xregs_state *xstate)
xstate 284 arch/x86/include/asm/fpu/internal.h XSTATE_OP(XSAVES, xstate, lmask, hmask, err);
xstate 286 arch/x86/include/asm/fpu/internal.h XSTATE_OP(XSAVE, xstate, lmask, hmask, err);
xstate 296 arch/x86/include/asm/fpu/internal.h static inline void copy_kernel_to_xregs_booting(struct xregs_state *xstate)
xstate 306 arch/x86/include/asm/fpu/internal.h XSTATE_OP(XRSTORS, xstate, lmask, hmask, err);
xstate 308 arch/x86/include/asm/fpu/internal.h XSTATE_OP(XRSTOR, xstate, lmask, hmask, err);
xstate 320 arch/x86/include/asm/fpu/internal.h static inline void copy_xregs_to_kernel(struct xregs_state *xstate)
xstate 329 arch/x86/include/asm/fpu/internal.h XSTATE_XSAVE(xstate, lmask, hmask, err);
xstate 338 arch/x86/include/asm/fpu/internal.h static inline void copy_kernel_to_xregs(struct xregs_state *xstate, u64 mask)
xstate 343 arch/x86/include/asm/fpu/internal.h XSTATE_XRESTORE(xstate, lmask, hmask);
xstate 380 arch/x86/include/asm/fpu/internal.h struct xregs_state *xstate = ((__force struct xregs_state *)buf);
xstate 386 arch/x86/include/asm/fpu/internal.h XSTATE_OP(XRSTOR, xstate, lmask, hmask, err);
xstate 396 arch/x86/include/asm/fpu/internal.h static inline int copy_kernel_to_xregs_err(struct xregs_state *xstate, u64 mask)
xstate 402 arch/x86/include/asm/fpu/internal.h XSTATE_OP(XRSTOR, xstate, lmask, hmask, err);
xstate 123 drivers/net/ppp/ppp_generic.c unsigned int xstate; /* transmit state bits 68 */
xstate 675 drivers/net/ppp/ppp_generic.c val = ppp->flags | ppp->xstate | ppp->rstate;
xstate 1580 drivers/net/ppp/ppp_generic.c if ((ppp->xstate & SC_COMP_RUN) && ppp->xc_state &&
xstate 2768 drivers/net/ppp/ppp_generic.c ppp->xstate &= ~SC_COMP_RUN;
xstate 2831 drivers/net/ppp/ppp_generic.c ppp->xstate &= ~SC_COMP_RUN;
xstate 2844 drivers/net/ppp/ppp_generic.c ppp->xstate &= ~SC_COMP_RUN;
xstate 2872 drivers/net/ppp/ppp_generic.c ppp->xstate |= SC_COMP_RUN;
xstate 2886 drivers/net/ppp/ppp_generic.c if (ppp->xc_state && (ppp->xstate & SC_COMP_RUN))
xstate 2897 drivers/net/ppp/ppp_generic.c void *xstate, *rstate;
xstate 2902 drivers/net/ppp/ppp_generic.c ppp->xstate = 0;
xstate 2904 drivers/net/ppp/ppp_generic.c xstate = ppp->xc_state;
xstate 2912 drivers/net/ppp/ppp_generic.c if (xstate) {
xstate 2913 drivers/net/ppp/ppp_generic.c xcomp->comp_free(xstate);
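
The arch/sh hits above trace the whole lifecycle of the lazily allocated thread.xstate buffer: it is carved out of task_xstate_cachep on first FPU use (arch/sh/kernel/cpu/fpu.c:22-25), duplicated for the child on fork (arch/sh/kernel/process.c:31-36), prefetched on context switch once it exists (process_32.c:185), and handed back to the slab cache on exit (process.c:44-46). The C sketch below condenses that allocate/copy/free pattern; the listing does not show the surrounding function names, so the *_sketch helpers, the extern declarations and the GFP_KERNEL flag are illustrative assumptions rather than the kernel's exact code.

#include <linux/slab.h>
#include <linux/sched.h>
#include <linux/string.h>
#include <linux/errno.h>

/* Both symbols appear in the listing; assume they are defined elsewhere in arch/sh. */
extern struct kmem_cache *task_xstate_cachep;
extern unsigned int xstate_size;

/* First FPU use: allocate the per-thread state lazily (cf. fpu.c:22-25). */
static int alloc_thread_xstate_sketch(struct task_struct *tsk)
{
        if (!tsk->thread.xstate) {
                tsk->thread.xstate = kmem_cache_alloc(task_xstate_cachep,
                                                      GFP_KERNEL); /* flag assumed */
                if (!tsk->thread.xstate)
                        return -ENOMEM;
        }
        return 0;
}

/* Fork: give the child its own copy of the parent's state (cf. process.c:31-36). */
static int dup_thread_xstate_sketch(struct task_struct *dst,
                                    struct task_struct *src)
{
        if (src->thread.xstate) {
                dst->thread.xstate = kmem_cache_alloc(task_xstate_cachep,
                                                      GFP_KERNEL);
                if (!dst->thread.xstate)
                        return -ENOMEM;
                memcpy(dst->thread.xstate, src->thread.xstate, xstate_size);
        }
        return 0;
}

/* Exit: return the buffer to the slab cache and clear the pointer (cf. process.c:44-46). */
static void free_thread_xstate_sketch(struct task_struct *tsk)
{
        if (tsk->thread.xstate) {
                kmem_cache_free(task_xstate_cachep, tsk->thread.xstate);
                tsk->thread.xstate = NULL;
        }
}

Allocating the FPU image lazily keeps thread_struct small for tasks that never touch the FPU, and clearing the pointer right after kmem_cache_free guards the exit path against a double free.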
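
The drivers/net/ppp hits reuse the same identifier for something unrelated: ppp->xstate is a bitmask of transmit-side state. The listing shows SC_COMP_RUN being set once a transmit compressor is running (line 2872), cleared again at 2768, 2831 and 2844, and tested before compressing on the transmit path (1580, 2886). A minimal model of that flag protocol follows; struct ppp_sketch is a stripped-down stand-in for the driver's struct ppp, and SC_COMP_RUN is assumed to come from the kernel's PPP ioctl header.

#include <linux/types.h>
#include <linux/ppp-ioctl.h>    /* SC_COMP_RUN (older trees: <linux/if_ppp.h>) */

/* Stripped-down stand-in for the two struct ppp fields the listing touches. */
struct ppp_sketch {
        unsigned int xstate;    /* transmit state bits */
        void *xc_state;         /* transmit compressor private state */
};

/* Compressor negotiated and initialised: let the xmit path compress. */
static void ppp_comp_up_sketch(struct ppp_sketch *ppp)
{
        ppp->xstate |= SC_COMP_RUN;
}

/* Compression torn down or failed: stop until it is renegotiated. */
static void ppp_comp_down_sketch(struct ppp_sketch *ppp)
{
        ppp->xstate &= ~SC_COMP_RUN;
}

/* Transmit path: compress only when a compressor exists and the flag is set. */
static bool ppp_should_compress_sketch(const struct ppp_sketch *ppp)
{
        return ppp->xc_state && (ppp->xstate & SC_COMP_RUN);
}

Line 675 (val = ppp->flags | ppp->xstate | ppp->rstate) folds these bits together with the receive-side rstate into a single flag word, presumably for reporting back to user space.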