vdm               208 arch/arm/vfp/vfpdouble.c 		  struct vfp_double *vdm, u32 fpscr)
vdm               215 arch/arm/vfp/vfpdouble.c 	if (vdm)
vdm               216 arch/arm/vfp/vfpdouble.c 		tm = vfp_double_type(vdm);
vdm               232 arch/arm/vfp/vfpdouble.c 			nan = vdm;
vdm               270 arch/arm/vfp/vfpdouble.c 	struct vfp_double vdm, vdd;
vdm               273 arch/arm/vfp/vfpdouble.c 	vfp_double_unpack(&vdm, vfp_get_double(dm));
vdm               274 arch/arm/vfp/vfpdouble.c 	tm = vfp_double_type(&vdm);
vdm               279 arch/arm/vfp/vfpdouble.c 			ret = vfp_propagate_nan(vdp, &vdm, NULL, fpscr);
vdm               280 arch/arm/vfp/vfpdouble.c 		else if (vdm.sign == 0) {
vdm               282 arch/arm/vfp/vfpdouble.c 			vdp = &vdm;
vdm               303 arch/arm/vfp/vfpdouble.c 		vfp_double_normalise_denormal(&vdm);
vdm               308 arch/arm/vfp/vfpdouble.c 	if (vdm.sign)
vdm               311 arch/arm/vfp/vfpdouble.c 	vfp_double_dump("sqrt", &vdm);
vdm               317 arch/arm/vfp/vfpdouble.c 	vdd.exponent = ((vdm.exponent - 1023) >> 1) + 1023;
vdm               318 arch/arm/vfp/vfpdouble.c 	vdd.significand = (u64)vfp_estimate_sqrt_significand(vdm.exponent, vdm.significand >> 32) << 31;
vdm               322 arch/arm/vfp/vfpdouble.c 	vdm.significand >>= 1 + (vdm.exponent & 1);
vdm               323 arch/arm/vfp/vfpdouble.c 	vdd.significand += 2 + vfp_estimate_div128to64(vdm.significand, 0, vdd.significand);
vdm               335 arch/arm/vfp/vfpdouble.c 			vdm.significand <<= 2;
vdm               337 arch/arm/vfp/vfpdouble.c 			sub128(&remh, &reml, vdm.significand, 0, termh, terml);
vdm               441 arch/arm/vfp/vfpdouble.c 	struct vfp_double vdm;
vdm               446 arch/arm/vfp/vfpdouble.c 	vfp_double_unpack(&vdm, vfp_get_double(dm));
vdm               448 arch/arm/vfp/vfpdouble.c 	tm = vfp_double_type(&vdm);
vdm               457 arch/arm/vfp/vfpdouble.c 		vfp_double_normalise_denormal(&vdm);
vdm               459 arch/arm/vfp/vfpdouble.c 	vsd.sign = vdm.sign;
vdm               460 arch/arm/vfp/vfpdouble.c 	vsd.significand = vfp_hi64to32jamming(vdm.significand);
vdm               473 arch/arm/vfp/vfpdouble.c 		vsd.exponent = vdm.exponent - (1023 - 127);
vdm               484 arch/arm/vfp/vfpdouble.c 	struct vfp_double vdm;
vdm               487 arch/arm/vfp/vfpdouble.c 	vdm.sign = 0;
vdm               488 arch/arm/vfp/vfpdouble.c 	vdm.exponent = 1023 + 63 - 1;
vdm               489 arch/arm/vfp/vfpdouble.c 	vdm.significand = (u64)m;
vdm               491 arch/arm/vfp/vfpdouble.c 	return vfp_double_normaliseround(dd, &vdm, fpscr, 0, "fuito");
vdm               496 arch/arm/vfp/vfpdouble.c 	struct vfp_double vdm;
vdm               499 arch/arm/vfp/vfpdouble.c 	vdm.sign = (m & 0x80000000) >> 16;
vdm               500 arch/arm/vfp/vfpdouble.c 	vdm.exponent = 1023 + 63 - 1;
vdm               501 arch/arm/vfp/vfpdouble.c 	vdm.significand = vdm.sign ? -m : m;
vdm               503 arch/arm/vfp/vfpdouble.c 	return vfp_double_normaliseround(dd, &vdm, fpscr, 0, "fsito");
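
The fuito/fsito hits above seed a struct vfp_double with exponent 1023 + 63 - 1 and the raw 32-bit integer as its significand, then let vfp_double_normaliseround() slide the leading bit into place. Below is a minimal user-space sketch of the same normalise-and-pack idea, written against the packed IEEE-754 binary64 layout rather than the kernel's internal 64-bit significand; the function name and layout choices are illustrative only, not kernel API.

#include <stdint.h>
#include <stdio.h>
#include <string.h>

/* Hypothetical stand-alone analogue of the fuito path: convert an unsigned
 * 32-bit integer to IEEE-754 double bits by locating the leading set bit
 * (the "normalise" step) and packing sign/exponent/fraction by hand.
 * A 32-bit integer always fits in the 53-bit mantissa, so no rounding
 * is needed here. */
static uint64_t u32_to_double_bits(uint32_t m)
{
	uint64_t exponent, fraction;
	int k;

	if (m == 0)
		return 0;				/* +0.0 */

	for (k = 31; !(m & (1U << k)); k--)		/* find the leading one */
		;

	exponent = 1023 + k;				/* same 1023 bias as vfpdouble.c */
	fraction = ((uint64_t)m << (52 - k)) & ((1ULL << 52) - 1);

	return (exponent << 52) | fraction;		/* sign bit stays 0 */
}

int main(void)
{
	uint32_t m = 100;
	uint64_t bits = u32_to_double_bits(m);
	double d;

	memcpy(&d, &bits, sizeof(d));
	printf("%u -> %#llx -> %g\n", m, (unsigned long long)bits, d);
	return 0;
}
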
vdm               508 arch/arm/vfp/vfpdouble.c 	struct vfp_double vdm;
vdm               513 arch/arm/vfp/vfpdouble.c 	vfp_double_unpack(&vdm, vfp_get_double(dm));
vdm               518 arch/arm/vfp/vfpdouble.c 	tm = vfp_double_type(&vdm);
vdm               523 arch/arm/vfp/vfpdouble.c 		vdm.sign = 0;
vdm               525 arch/arm/vfp/vfpdouble.c 	if (vdm.exponent >= 1023 + 32) {
vdm               526 arch/arm/vfp/vfpdouble.c 		d = vdm.sign ? 0 : 0xffffffff;
vdm               528 arch/arm/vfp/vfpdouble.c 	} else if (vdm.exponent >= 1023 - 1) {
vdm               529 arch/arm/vfp/vfpdouble.c 		int shift = 1023 + 63 - vdm.exponent;
vdm               535 arch/arm/vfp/vfpdouble.c 		d = (vdm.significand << 1) >> shift;
vdm               536 arch/arm/vfp/vfpdouble.c 		rem = vdm.significand << (65 - shift);
vdm               544 arch/arm/vfp/vfpdouble.c 		} else if ((rmode == FPSCR_ROUND_PLUSINF) ^ (vdm.sign != 0)) {
vdm               555 arch/arm/vfp/vfpdouble.c 		if (d && vdm.sign) {
vdm               562 arch/arm/vfp/vfpdouble.c 		if (vdm.exponent | vdm.significand) {
vdm               564 arch/arm/vfp/vfpdouble.c 			if (rmode == FPSCR_ROUND_PLUSINF && vdm.sign == 0)
vdm               566 arch/arm/vfp/vfpdouble.c 			else if (rmode == FPSCR_ROUND_MINUSINF && vdm.sign) {
vdm               587 arch/arm/vfp/vfpdouble.c 	struct vfp_double vdm;
vdm               592 arch/arm/vfp/vfpdouble.c 	vfp_double_unpack(&vdm, vfp_get_double(dm));
vdm               593 arch/arm/vfp/vfpdouble.c 	vfp_double_dump("VDM", &vdm);
vdm               598 arch/arm/vfp/vfpdouble.c 	tm = vfp_double_type(&vdm);
vdm               605 arch/arm/vfp/vfpdouble.c 	} else if (vdm.exponent >= 1023 + 32) {
vdm               607 arch/arm/vfp/vfpdouble.c 		if (vdm.sign)
vdm               610 arch/arm/vfp/vfpdouble.c 	} else if (vdm.exponent >= 1023 - 1) {
vdm               611 arch/arm/vfp/vfpdouble.c 		int shift = 1023 + 63 - vdm.exponent;	/* 58 */
vdm               614 arch/arm/vfp/vfpdouble.c 		d = (vdm.significand << 1) >> shift;
vdm               615 arch/arm/vfp/vfpdouble.c 		rem = vdm.significand << (65 - shift);
vdm               623 arch/arm/vfp/vfpdouble.c 		} else if ((rmode == FPSCR_ROUND_PLUSINF) ^ (vdm.sign != 0)) {
vdm               629 arch/arm/vfp/vfpdouble.c 		if (d > 0x7fffffff + (vdm.sign != 0)) {
vdm               630 arch/arm/vfp/vfpdouble.c 			d = 0x7fffffff + (vdm.sign != 0);
vdm               635 arch/arm/vfp/vfpdouble.c 		if (vdm.sign)
vdm               639 arch/arm/vfp/vfpdouble.c 		if (vdm.exponent | vdm.significand) {
vdm               641 arch/arm/vfp/vfpdouble.c 			if (rmode == FPSCR_ROUND_PLUSINF && vdm.sign == 0)
vdm               643 arch/arm/vfp/vfpdouble.c 			else if (rmode == FPSCR_ROUND_MINUSINF && vdm.sign)
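
Both double-to-integer blocks above form a truncated result d plus an MSB-aligned 64-bit remainder rem of the discarded bits, pick an increment from the FPSCR rounding mode, and round up exactly when rem + incr carries out of 64 bits. Here is a stand-alone sketch of the round-to-nearest, ties-to-even case; the helper is hypothetical, not kernel code.

#include <stdint.h>
#include <stdio.h>

/* Round v >> s to nearest, ties to even, using the carry-out trick from the
 * conversion routines above: align the discarded bits at the top of a 64-bit
 * remainder and let unsigned overflow of (rem + incr) decide the round-up. */
static uint64_t shift_round_nearest_even(uint64_t v, unsigned int s)
{
	uint64_t d    = v >> s;				/* truncated quotient */
	uint64_t rem  = s ? v << (64 - s) : 0;		/* discarded bits, MSB-aligned */
	uint64_t incr = 0x8000000000000000ULL;		/* one half, in rem's scale */

	if ((d & 1) == 0)
		incr -= 1;		/* exact halves must not round to an odd result */

	if (rem + incr < rem)		/* carry out => round up */
		d += 1;

	return d;
}

int main(void)
{
	/* 5/2 = 2.5 -> 2 (even), 7/2 = 3.5 -> 4 (even), 11/4 = 2.75 -> 3 */
	printf("%llu %llu %llu\n",
	       (unsigned long long)shift_round_nearest_even(5, 1),
	       (unsigned long long)shift_round_nearest_even(7, 1),
	       (unsigned long long)shift_round_nearest_even(11, 2));
	return 0;
}

In the kernel routines the other rounding modes reuse the same carry-out test and only pick a different incr: zero when truncating toward zero, all-ones when the result must move away from zero toward the selected infinity.
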
vdm               684 arch/arm/vfp/vfpdouble.c 			  struct vfp_double *vdm, u32 fpscr)
vdm               691 arch/arm/vfp/vfpdouble.c 	tm = vfp_double_type(vdm);
vdm               697 arch/arm/vfp/vfpdouble.c 		if (vdn->sign ^ vdm->sign) {
vdm               718 arch/arm/vfp/vfpdouble.c 		return vfp_propagate_nan(vdd, vdn, vdm, fpscr);
vdm               726 arch/arm/vfp/vfpdouble.c 	       struct vfp_double *vdm, u32 fpscr)
vdm               732 arch/arm/vfp/vfpdouble.c 	    vdm->significand & (1ULL << 63)) {
vdm               735 arch/arm/vfp/vfpdouble.c 		vfp_double_dump("VDM", vdm);
vdm               743 arch/arm/vfp/vfpdouble.c 	if (vdn->exponent < vdm->exponent) {
vdm               745 arch/arm/vfp/vfpdouble.c 		vdn = vdm;
vdm               746 arch/arm/vfp/vfpdouble.c 		vdm = t;
vdm               754 arch/arm/vfp/vfpdouble.c 		return vfp_double_fadd_nonnumber(vdd, vdn, vdm, fpscr);
vdm               766 arch/arm/vfp/vfpdouble.c 	exp_diff = vdn->exponent - vdm->exponent;
vdm               767 arch/arm/vfp/vfpdouble.c 	m_sig = vfp_shiftright64jamming(vdm->significand, exp_diff);
vdm               772 arch/arm/vfp/vfpdouble.c 	if (vdn->sign ^ vdm->sign) {
vdm               791 arch/arm/vfp/vfpdouble.c 		    struct vfp_double *vdm, u32 fpscr)
vdm               794 arch/arm/vfp/vfpdouble.c 	vfp_double_dump("VDM", vdm);
vdm               801 arch/arm/vfp/vfpdouble.c 	if (vdn->exponent < vdm->exponent) {
vdm               803 arch/arm/vfp/vfpdouble.c 		vdn = vdm;
vdm               804 arch/arm/vfp/vfpdouble.c 		vdm = t;
vdm               808 arch/arm/vfp/vfpdouble.c 	vdd->sign = vdn->sign ^ vdm->sign;
vdm               814 arch/arm/vfp/vfpdouble.c 		if (vdn->significand || (vdm->exponent == 2047 && vdm->significand))
vdm               815 arch/arm/vfp/vfpdouble.c 			return vfp_propagate_nan(vdd, vdn, vdm, fpscr);
vdm               816 arch/arm/vfp/vfpdouble.c 		if ((vdm->exponent | vdm->significand) == 0) {
vdm               829 arch/arm/vfp/vfpdouble.c 	if ((vdm->exponent | vdm->significand) == 0) {
vdm               840 arch/arm/vfp/vfpdouble.c 	vdd->exponent = vdn->exponent + vdm->exponent - 1023 + 2;
vdm               841 arch/arm/vfp/vfpdouble.c 	vdd->significand = vfp_hi64multiply64(vdn->significand, vdm->significand);
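
vfp_double_multiply() above produces a 128-bit significand product but keeps only 64 bits through vfp_hi64multiply64(), folding any non-zero low bits into the sticky bit, the same "jamming" convention seen at the vfp_hi64to32jamming() and vfp_shiftright64jamming() hits. A stand-alone sketch of that idea using the compiler's 128-bit integer extension follows; the kernel builds the product with mul64to128() instead, and the helper name below is made up.

#include <stdint.h>
#include <stdio.h>

/* Hypothetical analogue of the significand multiply above: keep the high
 * 64 bits of a 64x64 -> 128-bit product and "jam" any non-zero discarded
 * low bits into bit 0 so later rounding still knows precision was lost. */
static uint64_t hi64_multiply_jam(uint64_t n, uint64_t m)
{
	unsigned __int128 p = (unsigned __int128)n * m;	/* GCC/Clang extension */
	uint64_t hi = (uint64_t)(p >> 64);
	uint64_t lo = (uint64_t)p;

	return hi | (lo != 0);			/* sticky bit records the lost half */
}

int main(void)
{
	/* clean product: only the high half is populated, sticky bit clear */
	printf("%#llx\n", (unsigned long long)hi64_multiply_jam(1ULL << 63, 1ULL << 63));
	/* the extra low-order contribution shows up as bit 0 */
	printf("%#llx\n", (unsigned long long)hi64_multiply_jam((1ULL << 63) | 1, 1ULL << 63));
	return 0;
}
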
vdm               853 arch/arm/vfp/vfpdouble.c 	struct vfp_double vdd, vdp, vdn, vdm;
vdm               860 arch/arm/vfp/vfpdouble.c 	vfp_double_unpack(&vdm, vfp_get_double(dm));
vdm               861 arch/arm/vfp/vfpdouble.c 	if (vdm.exponent == 0 && vdm.significand)
vdm               862 arch/arm/vfp/vfpdouble.c 		vfp_double_normalise_denormal(&vdm);
vdm               864 arch/arm/vfp/vfpdouble.c 	exceptions = vfp_double_multiply(&vdp, &vdn, &vdm, fpscr);
vdm               920 arch/arm/vfp/vfpdouble.c 	struct vfp_double vdd, vdn, vdm;
vdm               927 arch/arm/vfp/vfpdouble.c 	vfp_double_unpack(&vdm, vfp_get_double(dm));
vdm               928 arch/arm/vfp/vfpdouble.c 	if (vdm.exponent == 0 && vdm.significand)
vdm               929 arch/arm/vfp/vfpdouble.c 		vfp_double_normalise_denormal(&vdm);
vdm               931 arch/arm/vfp/vfpdouble.c 	exceptions = vfp_double_multiply(&vdd, &vdn, &vdm, fpscr);
vdm               940 arch/arm/vfp/vfpdouble.c 	struct vfp_double vdd, vdn, vdm;
vdm               947 arch/arm/vfp/vfpdouble.c 	vfp_double_unpack(&vdm, vfp_get_double(dm));
vdm               948 arch/arm/vfp/vfpdouble.c 	if (vdm.exponent == 0 && vdm.significand)
vdm               949 arch/arm/vfp/vfpdouble.c 		vfp_double_normalise_denormal(&vdm);
vdm               951 arch/arm/vfp/vfpdouble.c 	exceptions = vfp_double_multiply(&vdd, &vdn, &vdm, fpscr);
vdm               962 arch/arm/vfp/vfpdouble.c 	struct vfp_double vdd, vdn, vdm;
vdm               969 arch/arm/vfp/vfpdouble.c 	vfp_double_unpack(&vdm, vfp_get_double(dm));
vdm               970 arch/arm/vfp/vfpdouble.c 	if (vdm.exponent == 0 && vdm.significand)
vdm               971 arch/arm/vfp/vfpdouble.c 		vfp_double_normalise_denormal(&vdm);
vdm               973 arch/arm/vfp/vfpdouble.c 	exceptions = vfp_double_add(&vdd, &vdn, &vdm, fpscr);
vdm               983 arch/arm/vfp/vfpdouble.c 	struct vfp_double vdd, vdn, vdm;
vdm               990 arch/arm/vfp/vfpdouble.c 	vfp_double_unpack(&vdm, vfp_get_double(dm));
vdm               991 arch/arm/vfp/vfpdouble.c 	if (vdm.exponent == 0 && vdm.significand)
vdm               992 arch/arm/vfp/vfpdouble.c 		vfp_double_normalise_denormal(&vdm);
vdm               997 arch/arm/vfp/vfpdouble.c 	vdm.sign = vfp_sign_negate(vdm.sign);
vdm               999 arch/arm/vfp/vfpdouble.c 	exceptions = vfp_double_add(&vdd, &vdn, &vdm, fpscr);
vdm              1009 arch/arm/vfp/vfpdouble.c 	struct vfp_double vdd, vdn, vdm;
vdm              1014 arch/arm/vfp/vfpdouble.c 	vfp_double_unpack(&vdm, vfp_get_double(dm));
vdm              1016 arch/arm/vfp/vfpdouble.c 	vdd.sign = vdn.sign ^ vdm.sign;
vdm              1019 arch/arm/vfp/vfpdouble.c 	tm = vfp_double_type(&vdm);
vdm              1061 arch/arm/vfp/vfpdouble.c 		vfp_double_normalise_denormal(&vdm);
vdm              1066 arch/arm/vfp/vfpdouble.c 	vdd.exponent = vdn.exponent - vdm.exponent + 1023 - 1;
vdm              1067 arch/arm/vfp/vfpdouble.c 	vdm.significand <<= 1;
vdm              1068 arch/arm/vfp/vfpdouble.c 	if (vdm.significand <= (2 * vdn.significand)) {
vdm              1072 arch/arm/vfp/vfpdouble.c 	vdd.significand = vfp_estimate_div128to64(vdn.significand, 0, vdm.significand);
vdm              1075 arch/arm/vfp/vfpdouble.c 		mul64to128(&termh, &terml, vdm.significand, vdd.significand);
vdm              1079 arch/arm/vfp/vfpdouble.c 			add128(&remh, &reml, remh, reml, 0, vdm.significand);
vdm              1086 arch/arm/vfp/vfpdouble.c 	exceptions = vfp_propagate_nan(&vdd, &vdn, &vdm, fpscr);
vdm              1092 arch/arm/vfp/vfpdouble.c 	exceptions = vfp_propagate_nan(&vdd, &vdm, &vdn, fpscr);
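
Every vfpdouble.c hit above works on the unpacked struct vfp_double triple of sign, biased exponent (bias 1023, with 2047 marking infinities and NaNs) and 64-bit significand, and the recurring "exponent == 0 && significand" test picks out denormals that still need normalising. The sketch below does the same field split and classification directly on the packed IEEE-754 word in user space; the struct and function names are illustrative only, and the kernel's internal significand layout differs.

#include <math.h>
#include <stdint.h>
#include <stdio.h>
#include <string.h>

/* Hypothetical user-space counterpart of the sign/exponent/significand
 * split used throughout vfpdouble.c, taken straight from the packed
 * IEEE-754 binary64 fields. */
struct double_fields {
	unsigned int sign;	/* 1 bit */
	unsigned int exponent;	/* 11 bits, bias 1023, 2047 = Inf/NaN */
	uint64_t fraction;	/* 52 bits, implicit leading 1 for normals */
};

static void unpack_double(struct double_fields *f, double d)
{
	uint64_t bits;

	memcpy(&bits, &d, sizeof(bits));
	f->sign = bits >> 63;
	f->exponent = (bits >> 52) & 0x7ff;
	f->fraction = bits & ((1ULL << 52) - 1);
}

static const char *classify(const struct double_fields *f)
{
	if (f->exponent == 2047)
		return f->fraction ? "nan" : "infinity";
	if (f->exponent == 0)
		return f->fraction ? "denormal" : "zero";	/* cf. exponent == 0 && significand */
	return "number";
}

int main(void)
{
	const double samples[] = { 0.0, 1.5, -2.0, 5e-324, INFINITY, NAN };
	unsigned int i;

	for (i = 0; i < sizeof(samples) / sizeof(samples[0]); i++) {
		struct double_fields f;

		unpack_double(&f, samples[i]);
		printf("%-8g sign=%u exp=%4u frac=%#014llx %s\n",
		       samples[i], f.sign, f.exponent,
		       (unsigned long long)f.fraction, classify(&f));
	}
	return 0;
}
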
vdm               314 drivers/usb/typec/altmodes/displayport.c 	.vdm = dp_altmode_vdm,
vdm               185 drivers/usb/typec/bus.c 	if (!pdev->ops || !pdev->ops->vdm)
vdm               188 drivers/usb/typec/bus.c 	return pdev->ops->vdm(pdev, header, vdo, count);
vdm              1535 drivers/usb/typec/tcpm/tcpm.c 	.vdm = tcpm_altmode_vdm,
vdm               246 drivers/usb/typec/ucsi/displayport.c 	.vdm = ucsi_displayport_vdm,
vdm                61 include/linux/usb/typec_altmode.h 	int (*vdm)(struct typec_altmode *altmode, const u32 hdr,
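
The drivers/usb/typec and typec_altmode.h hits above are the USB Type-C side of the identifier: .vdm is the alternate-mode operation that bus.c dispatches incoming Vendor Defined Messages to, guarded by the "if (!pdev->ops || !pdev->ops->vdm)" check before the call. Below is a hedged sketch of what a partner driver's handler and ops table can look like, modelled loosely on the dp_altmode_vdm hook referenced above; the names and the trivial handler body are illustrative, not an existing driver.

#include <linux/device.h>
#include <linux/usb/typec_altmode.h>

/* Hypothetical partner alternate-mode driver hook: called by the Type-C bus
 * for every VDM received for this mode. hdr is the 32-bit VDM header and
 * vdo[]/count are the data objects that followed it. */
static int sample_altmode_vdm(struct typec_altmode *alt, const u32 hdr,
			      const u32 *vdo, int count)
{
	dev_dbg(&alt->dev, "VDM header %08x with %d VDO(s)\n", hdr, count);

	/* A real handler would decode the command here and, if a reply is
	 * needed, send it back through typec_altmode_vdm(). */
	return 0;
}

static const struct typec_altmode_ops sample_altmode_ops = {
	.vdm = sample_altmode_vdm,
};

A driver points its struct typec_altmode's ops at a table like this at probe time; each VDM the port receives for that mode then arrives through the guarded pdev->ops->vdm() call shown in the bus.c hits.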