References to the identifier vsm in arch/arm/vfp/vfpsingle.c, the Linux kernel's software support code for VFP single-precision arithmetic; each entry gives the source line number followed by the line itself:

vsm               216 arch/arm/vfp/vfpsingle.c 		  struct vfp_single *vsm, u32 fpscr)
vsm               223 arch/arm/vfp/vfpsingle.c 	if (vsm)
vsm               224 arch/arm/vfp/vfpsingle.c 		tm = vfp_single_type(vsm);
vsm               240 arch/arm/vfp/vfpsingle.c 			nan = vsm;
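
Lines 216-240 fall inside vfp_propagate_nan(), which classifies its operands and selects the NaN to deliver as the result. A minimal standalone sketch of the underlying IEEE 754 rule, written over packed single-precision bit patterns rather than the kernel's unpacked struct vfp_single (the kernel version additionally honours the FPSCR default-NaN mode):

    #include <stdint.h>

    static int is_nan(uint32_t f)
    {
            return (f & 0x7f800000) == 0x7f800000 && (f & 0x007fffff);
    }

    static int is_snan(uint32_t f)
    {
            /* the quiet bit is the top fraction bit, 0x00400000 */
            return is_nan(f) && !(f & 0x00400000);
    }

    /* A signalling NaN raises invalid-operation and is quietened;
     * otherwise an incoming quiet NaN propagates unchanged. The
     * caller guarantees at least one operand is a NaN. */
    static uint32_t propagate_nan(uint32_t n, uint32_t m, int *invalid)
    {
            *invalid = is_snan(n) || is_snan(m);
            if (is_snan(n)) return n | 0x00400000;
            if (is_snan(m)) return m | 0x00400000;
            return is_nan(n) ? n : m;
    }
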
vsm               318 arch/arm/vfp/vfpsingle.c 	struct vfp_single vsm, vsd;
vsm               321 arch/arm/vfp/vfpsingle.c 	vfp_single_unpack(&vsm, m);
vsm               322 arch/arm/vfp/vfpsingle.c 	tm = vfp_single_type(&vsm);
vsm               327 arch/arm/vfp/vfpsingle.c 			ret = vfp_propagate_nan(vsp, &vsm, NULL, fpscr);
vsm               328 arch/arm/vfp/vfpsingle.c 		else if (vsm.sign == 0) {
vsm               330 arch/arm/vfp/vfpsingle.c 			vsp = &vsm;
vsm               351 arch/arm/vfp/vfpsingle.c 		vfp_single_normalise_denormal(&vsm);
vsm               356 arch/arm/vfp/vfpsingle.c 	if (vsm.sign)
vsm               359 arch/arm/vfp/vfpsingle.c 	vfp_single_dump("sqrt", &vsm);
vsm               365 arch/arm/vfp/vfpsingle.c 	vsd.exponent = ((vsm.exponent - 127) >> 1) + 127;
vsm               366 arch/arm/vfp/vfpsingle.c 	vsd.significand = vfp_estimate_sqrt_significand(vsm.exponent, vsm.significand) + 2;
vsm               379 arch/arm/vfp/vfpsingle.c 			vsm.significand <<= !(vsm.exponent & 1);
vsm               381 arch/arm/vfp/vfpsingle.c 			rem = ((u64)vsm.significand << 32) - term;
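
Lines 318-381 are vfp_single_fsqrt(). The arithmetic on line 365 halves the unbiased exponent, and line 379 pre-doubles the significand when that exponent is odd, since sqrt(m * 2^e) only splits cleanly into sqrt(m) * 2^(e/2) for even e. A quick demonstration of that identity with ordinary doubles (illustrative only, not the kernel's estimate-and-refine algorithm):

    #include <math.h>
    #include <stdio.h>

    int main(void)
    {
            double x = 96.0;                /* 0.75 * 2^7, odd exponent */
            int e;
            double m = frexp(x, &e);        /* x = m * 2^e, m in [0.5, 1) */

            if (e & 1) {                    /* make the exponent even ... */
                    m *= 2.0;               /* ... by doubling the significand */
                    e -= 1;
            }
            /* both lines print the same value */
            printf("%.17g\n%.17g\n", sqrt(m) * ldexp(1.0, e / 2), sqrt(x));
            return 0;
    }
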
vsm               484 arch/arm/vfp/vfpsingle.c 	struct vfp_single vsm;
vsm               489 arch/arm/vfp/vfpsingle.c 	vfp_single_unpack(&vsm, m);
vsm               491 arch/arm/vfp/vfpsingle.c 	tm = vfp_single_type(&vsm);
vsm               500 arch/arm/vfp/vfpsingle.c 		vfp_single_normalise_denormal(&vsm);
vsm               502 arch/arm/vfp/vfpsingle.c 	vdd.sign = vsm.sign;
vsm               503 arch/arm/vfp/vfpsingle.c 	vdd.significand = (u64)vsm.significand << 32;
vsm               516 arch/arm/vfp/vfpsingle.c 		vdd.exponent = vsm.exponent + (1023 - 127);
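
Lines 484-516 are the single-to-double conversion: the sign carries over (line 502), the significand is widened into the top of a u64 (line 503), and the biased exponent is adjusted by the difference of the two biases, 1023 - 127 (line 516). The same rebias in packed form, for normal numbers only (the kernel path also covers zeros, denormals, infinities and NaNs):

    #include <stdint.h>

    static uint64_t single_to_double_bits(uint32_t s)
    {
            uint64_t sign = (uint64_t)(s >> 31) << 63;
            uint64_t exp  = ((uint64_t)((s >> 23) & 0xff) + (1023 - 127)) << 52;
            uint64_t frac = (uint64_t)(s & 0x007fffff) << (52 - 23);

            return sign | exp | frac;
    }
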
vsm               549 arch/arm/vfp/vfpsingle.c 	struct vfp_single vsm;
vsm               554 arch/arm/vfp/vfpsingle.c 	vfp_single_unpack(&vsm, m);
vsm               555 arch/arm/vfp/vfpsingle.c 	vfp_single_dump("VSM", &vsm);
vsm               560 arch/arm/vfp/vfpsingle.c 	tm = vfp_single_type(&vsm);
vsm               565 arch/arm/vfp/vfpsingle.c 		vsm.sign = 0;
vsm               567 arch/arm/vfp/vfpsingle.c 	if (vsm.exponent >= 127 + 32) {
vsm               568 arch/arm/vfp/vfpsingle.c 		d = vsm.sign ? 0 : 0xffffffff;
vsm               570 arch/arm/vfp/vfpsingle.c 	} else if (vsm.exponent >= 127 - 1) {
vsm               571 arch/arm/vfp/vfpsingle.c 		int shift = 127 + 31 - vsm.exponent;
vsm               577 arch/arm/vfp/vfpsingle.c 		d = (vsm.significand << 1) >> shift;
vsm               578 arch/arm/vfp/vfpsingle.c 		rem = vsm.significand << (33 - shift);
vsm               586 arch/arm/vfp/vfpsingle.c 		} else if ((rmode == FPSCR_ROUND_PLUSINF) ^ (vsm.sign != 0)) {
vsm               597 arch/arm/vfp/vfpsingle.c 		if (d && vsm.sign) {
vsm               604 arch/arm/vfp/vfpsingle.c 		if (vsm.exponent | vsm.significand) {
vsm               606 arch/arm/vfp/vfpsingle.c 			if (rmode == FPSCR_ROUND_PLUSINF && vsm.sign == 0)
vsm               608 arch/arm/vfp/vfpsingle.c 			else if (rmode == FPSCR_ROUND_MINUSINF && vsm.sign) {
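
Lines 549-608 are the float-to-unsigned-int conversion: exponents of 127 + 32 or more cannot fit in 32 bits (line 567); otherwise the significand is shifted down by 127 + 31 - exponent (lines 571-578) and the shifted-out remainder drives rounding. The rounding itself uses a carry trick, sketched standalone below (a simplified enum stands in for the kernel's FPSCR_ROUND_* values, and the overflow checks are omitted):

    #include <stdint.h>

    enum rmode { ROUND_NEAREST, ROUND_TOZERO, ROUND_PLUSINF, ROUND_MINUSINF };

    /* d is the truncated integer, rem the discarded bits left-aligned
     * in a u32: an increment is added to rem, and a carry out of that
     * addition bumps d. */
    static uint32_t round_shifted(uint32_t d, uint32_t rem, int sign,
                                  enum rmode r)
    {
            uint32_t incr = 0;

            if (r == ROUND_NEAREST) {
                    incr = 0x80000000;      /* round to nearest ... */
                    if ((d & 1) == 0)
                            incr -= 1;      /* ... ties to even */
            } else if ((r == ROUND_PLUSINF) ^ (sign != 0)) {
                    incr = ~0u;             /* round away from truncation */
            }

            if (rem + incr < rem)           /* carry out of the addition */
                    d += 1;
            return d;
    }
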
vsm               629 arch/arm/vfp/vfpsingle.c 	struct vfp_single vsm;
vsm               634 arch/arm/vfp/vfpsingle.c 	vfp_single_unpack(&vsm, m);
vsm               635 arch/arm/vfp/vfpsingle.c 	vfp_single_dump("VSM", &vsm);
vsm               640 arch/arm/vfp/vfpsingle.c 	tm = vfp_single_type(&vsm);
vsm               641 arch/arm/vfp/vfpsingle.c 	if (vfp_single_type(&vsm) & VFP_DENORMAL)
vsm               647 arch/arm/vfp/vfpsingle.c 	} else if (vsm.exponent >= 127 + 32) {
vsm               652 arch/arm/vfp/vfpsingle.c 		if (vsm.sign)
vsm               655 arch/arm/vfp/vfpsingle.c 	} else if (vsm.exponent >= 127 - 1) {
vsm               656 arch/arm/vfp/vfpsingle.c 		int shift = 127 + 31 - vsm.exponent;
vsm               660 arch/arm/vfp/vfpsingle.c 		d = (vsm.significand << 1) >> shift;
vsm               661 arch/arm/vfp/vfpsingle.c 		rem = vsm.significand << (33 - shift);
vsm               669 arch/arm/vfp/vfpsingle.c 		} else if ((rmode == FPSCR_ROUND_PLUSINF) ^ (vsm.sign != 0)) {
vsm               675 arch/arm/vfp/vfpsingle.c 		if (d > 0x7fffffff + (vsm.sign != 0)) {
vsm               676 arch/arm/vfp/vfpsingle.c 			d = 0x7fffffff + (vsm.sign != 0);
vsm               681 arch/arm/vfp/vfpsingle.c 		if (vsm.sign)
vsm               685 arch/arm/vfp/vfpsingle.c 		if (vsm.exponent | vsm.significand) {
vsm               687 arch/arm/vfp/vfpsingle.c 			if (rmode == FPSCR_ROUND_PLUSINF && vsm.sign == 0)
vsm               689 arch/arm/vfp/vfpsingle.c 			else if (rmode == FPSCR_ROUND_MINUSINF && vsm.sign)
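
Lines 629-689 are the signed counterpart. The clamp on lines 675-676 exploits two's complement asymmetry: the largest magnitude that fits is 0x7fffffff for positive inputs but 0x80000000 for negative ones, which is exactly 0x7fffffff + (sign != 0). A standalone sketch of that saturation step:

    #include <stdint.h>

    static int32_t clamp_and_sign(uint32_t magnitude, int sign, int *invalid)
    {
            uint32_t limit = 0x7fffffff + (sign != 0);

            if (magnitude > limit) {
                    magnitude = limit;      /* saturate ... */
                    *invalid = 1;           /* ... raising invalid (FPSCR_IOC) */
            }
            if (sign)
                    magnitude = -magnitude; /* negate in unsigned arithmetic */
            return (int32_t)magnitude;
    }
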
vsm               730 arch/arm/vfp/vfpsingle.c 			  struct vfp_single *vsm, u32 fpscr)
vsm               737 arch/arm/vfp/vfpsingle.c 	tm = vfp_single_type(vsm);
vsm               743 arch/arm/vfp/vfpsingle.c 		if (vsn->sign ^ vsm->sign) {
vsm               764 arch/arm/vfp/vfpsingle.c 		return vfp_propagate_nan(vsd, vsn, vsm, fpscr);
vsm               772 arch/arm/vfp/vfpsingle.c 	       struct vfp_single *vsm, u32 fpscr)
vsm               777 arch/arm/vfp/vfpsingle.c 	    vsm->significand & 0x80000000) {
vsm               780 arch/arm/vfp/vfpsingle.c 		vfp_single_dump("VSM", vsm);
vsm               788 arch/arm/vfp/vfpsingle.c 	if (vsn->exponent < vsm->exponent) {
vsm               790 arch/arm/vfp/vfpsingle.c 		vsn = vsm;
vsm               791 arch/arm/vfp/vfpsingle.c 		vsm = t;
vsm               799 arch/arm/vfp/vfpsingle.c 		return vfp_single_fadd_nonnumber(vsd, vsn, vsm, fpscr);
vsm               811 arch/arm/vfp/vfpsingle.c 	exp_diff = vsn->exponent - vsm->exponent;
vsm               812 arch/arm/vfp/vfpsingle.c 	m_sig = vfp_shiftright32jamming(vsm->significand, exp_diff);
vsm               817 arch/arm/vfp/vfpsingle.c 	if (vsn->sign ^ vsm->sign) {
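
Lines 730-764 (vfp_single_fadd_nonnumber) dispose of the special cases first: adding infinities of opposite sign is an invalid operation, and NaNs are handed to vfp_propagate_nan(). Lines 772-817 then implement the ordinary path: the operands are swapped so vsn holds the larger exponent (lines 788-791), and the smaller significand is shifted right with "jamming" (line 812), meaning any bits shifted out are OR'd into the least significant bit as a sticky bit so later rounding still sees that the discarded part was non-zero. A sketch mirroring the kernel's vfp_shiftright32jamming():

    #include <stdint.h>

    static uint32_t shiftright32jamming(uint32_t val, unsigned int shift)
    {
            if (shift) {
                    if (shift < 32)
                            val = (val >> shift) |
                                  ((val << (32 - shift)) != 0);
                    else
                            val = val != 0; /* all bits shifted out: sticky only */
            }
            return val;
    }
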
vsm               835 arch/arm/vfp/vfpsingle.c vfp_single_multiply(struct vfp_single *vsd, struct vfp_single *vsn, struct vfp_single *vsm, u32 fpscr)
vsm               838 arch/arm/vfp/vfpsingle.c 	vfp_single_dump("VSM", vsm);
vsm               845 arch/arm/vfp/vfpsingle.c 	if (vsn->exponent < vsm->exponent) {
vsm               847 arch/arm/vfp/vfpsingle.c 		vsn = vsm;
vsm               848 arch/arm/vfp/vfpsingle.c 		vsm = t;
vsm               852 arch/arm/vfp/vfpsingle.c 	vsd->sign = vsn->sign ^ vsm->sign;
vsm               858 arch/arm/vfp/vfpsingle.c 		if (vsn->significand || (vsm->exponent == 255 && vsm->significand))
vsm               859 arch/arm/vfp/vfpsingle.c 			return vfp_propagate_nan(vsd, vsn, vsm, fpscr);
vsm               860 arch/arm/vfp/vfpsingle.c 		if ((vsm->exponent | vsm->significand) == 0) {
vsm               873 arch/arm/vfp/vfpsingle.c 	if ((vsm->exponent | vsm->significand) == 0) {
vsm               884 arch/arm/vfp/vfpsingle.c 	vsd->exponent = vsn->exponent + vsm->exponent - 127 + 2;
vsm               885 arch/arm/vfp/vfpsingle.c 	vsd->significand = vfp_hi64to32jamming((u64)vsn->significand * vsm->significand);
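
Lines 835-885 are the shared multiply core: the result sign is the XOR of the operand signs (line 852), the result exponent is the sum of the operand exponents minus the bias, plus an alignment adjustment (line 884), and the significands are multiplied out to 64 bits, keeping the high half with any non-zero low bits jammed into the sticky bit (line 885). A sketch mirroring vfp_hi64to32jamming():

    #include <stdint.h>

    static uint32_t hi64to32jamming(uint64_t v)
    {
            /* keep the high 32 bits; OR any non-zero low bits into the LSB */
            return (uint32_t)(v >> 32) | ((uint32_t)v != 0);
    }

    static uint32_t multiply_significands(uint32_t n, uint32_t m)
    {
            return hi64to32jamming((uint64_t)n * m);
    }
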
vsm               897 arch/arm/vfp/vfpsingle.c 	struct vfp_single vsd, vsp, vsn, vsm;
vsm               907 arch/arm/vfp/vfpsingle.c 	vfp_single_unpack(&vsm, m);
vsm               908 arch/arm/vfp/vfpsingle.c 	if (vsm.exponent == 0 && vsm.significand)
vsm               909 arch/arm/vfp/vfpsingle.c 		vfp_single_normalise_denormal(&vsm);
vsm               911 arch/arm/vfp/vfpsingle.c 	exceptions = vfp_single_multiply(&vsp, &vsn, &vsm, fpscr);
vsm               969 arch/arm/vfp/vfpsingle.c 	struct vfp_single vsd, vsn, vsm;
vsm               979 arch/arm/vfp/vfpsingle.c 	vfp_single_unpack(&vsm, m);
vsm               980 arch/arm/vfp/vfpsingle.c 	if (vsm.exponent == 0 && vsm.significand)
vsm               981 arch/arm/vfp/vfpsingle.c 		vfp_single_normalise_denormal(&vsm);
vsm               983 arch/arm/vfp/vfpsingle.c 	exceptions = vfp_single_multiply(&vsd, &vsn, &vsm, fpscr);
vsm               992 arch/arm/vfp/vfpsingle.c 	struct vfp_single vsd, vsn, vsm;
vsm              1002 arch/arm/vfp/vfpsingle.c 	vfp_single_unpack(&vsm, m);
vsm              1003 arch/arm/vfp/vfpsingle.c 	if (vsm.exponent == 0 && vsm.significand)
vsm              1004 arch/arm/vfp/vfpsingle.c 		vfp_single_normalise_denormal(&vsm);
vsm              1006 arch/arm/vfp/vfpsingle.c 	exceptions = vfp_single_multiply(&vsd, &vsn, &vsm, fpscr);
vsm              1016 arch/arm/vfp/vfpsingle.c 	struct vfp_single vsd, vsn, vsm;
vsm              1029 arch/arm/vfp/vfpsingle.c 	vfp_single_unpack(&vsm, m);
vsm              1030 arch/arm/vfp/vfpsingle.c 	if (vsm.exponent == 0 && vsm.significand)
vsm              1031 arch/arm/vfp/vfpsingle.c 		vfp_single_normalise_denormal(&vsm);
vsm              1033 arch/arm/vfp/vfpsingle.c 	exceptions = vfp_single_add(&vsd, &vsn, &vsm, fpscr);
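
Lines 897-1033 are the fused multiply-accumulate, multiply, and add entry points; each repeats the same prologue: unpack the operand and, if the exponent is zero while the significand is not, the value is a denormal and is normalised before the core operation (lines 908-909, 980-981, 1003-1004, 1030-1031). A standalone sketch of that normalisation, using a simplified struct with a signed exponent (the kernel's vfp_single_normalise_denormal() works against its own internal significand alignment):

    #include <stdint.h>

    struct sf {
            uint32_t sign;
            int      exponent;
            uint32_t significand;   /* non-zero for a denormal */
    };

    static void normalise_denormal(struct sf *s)
    {
            /* shift the leading bit up to a fixed position, decrementing
             * the exponent once per shift to keep the value unchanged */
            while (!(s->significand & 0x80000000)) {
                    s->significand <<= 1;
                    s->exponent -= 1;
            }
    }
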
vsm              1054 arch/arm/vfp/vfpsingle.c 	struct vfp_single vsd, vsn, vsm;
vsm              1062 arch/arm/vfp/vfpsingle.c 	vfp_single_unpack(&vsm, m);
vsm              1064 arch/arm/vfp/vfpsingle.c 	vsd.sign = vsn.sign ^ vsm.sign;
vsm              1067 arch/arm/vfp/vfpsingle.c 	tm = vfp_single_type(&vsm);
vsm              1109 arch/arm/vfp/vfpsingle.c 		vfp_single_normalise_denormal(&vsm);
vsm              1114 arch/arm/vfp/vfpsingle.c 	vsd.exponent = vsn.exponent - vsm.exponent + 127 - 1;
vsm              1115 arch/arm/vfp/vfpsingle.c 	vsm.significand <<= 1;
vsm              1116 arch/arm/vfp/vfpsingle.c 	if (vsm.significand <= (2 * vsn.significand)) {
vsm              1122 arch/arm/vfp/vfpsingle.c 		do_div(significand, vsm.significand);
vsm              1126 arch/arm/vfp/vfpsingle.c 		vsd.significand |= ((u64)vsm.significand * vsd.significand != (u64)vsn.significand << 32);
vsm              1131 arch/arm/vfp/vfpsingle.c 	exceptions = vfp_propagate_nan(&vsd, &vsn, &vsm, fpscr);
vsm              1137 arch/arm/vfp/vfpsingle.c 	exceptions = vfp_propagate_nan(&vsd, &vsm, &vsn, fpscr);
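
Lines 1054-1137 are the division path: the result sign is again the XOR of the operand signs (line 1064), the result exponent is the difference of the operand exponents rebased against the bias (line 1114), and the quotient of the significands is computed from a 64-bit dividend via do_div() (line 1122), with a sticky bit OR'd in when the back-multiplied quotient does not reproduce the dividend exactly (line 1126). A simplified standalone sketch, assuming the operands were pre-scaled so the dividend's significand is smaller than the divisor's, as the kernel arranges just before calling do_div():

    #include <stdint.h>

    static uint32_t divide_significands(uint32_t n, uint32_t m)
    {
            uint64_t dividend = (uint64_t)n << 32;
            uint32_t q = (uint32_t)(dividend / m);

            /* inexact division: jam a sticky bit into the LSB so the
             * lost remainder still influences rounding */
            if ((uint64_t)m * q != dividend)
                    q |= 1;
            return q;
    }
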