
PowerPC floating point little-endian [12 of 15]


Fixes for little-endian in 32-bit assembly.  These functions store a
double to the stack with stfd and then pick it apart with 32-bit (or
16-bit) integer loads; on little-endian the word holding the sign and
exponent sits at the higher address, so the load offsets must be
swapped relative to big-endian.
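
For context, a small stand-alone C sketch (not part of the patch, and
assuming IEEE-754 doubles) of the layout the swapped offsets rely on.
The assembly settles endianness at build time via the
compiler-defined __LITTLE_ENDIAN__ macro; the sketch checks at run
time instead:

#include <stdint.h>
#include <stdio.h>
#include <string.h>

int
main (void)
{
  double d = -2.0;		/* Sign 1, biased exponent 0x400.  */
  uint32_t w[2];
  int one = 1;
  /* Nonzero on a little-endian host.  */
  int little = *(char *) &one;

  memcpy (w, &d, sizeof d);	/* Like stfd 8(r1), then two lwz.  */

  /* On little-endian the sign/exponent word is at the higher offset
     (12(r1) in the patch); on big-endian it is at the lower (8(r1)).  */
  uint32_t hi = w[little ? 1 : 0];
  uint32_t lo = w[little ? 0 : 1];

  printf ("sign=%u exponent=0x%03x low=0x%08x\n",
	  hi >> 31, (hi >> 20) & 0x7ff, lo);
  return 0;
}

On either byte order this prints sign=1 exponent=0x400
low=0x00000000, because the indices compensate in the same way the
swapped lwz/lhz offsets below do.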

	2013-07-10  Alan Modra  <amodra@gmail.com>

	* sysdeps/powerpc/powerpc32/fpu/s_copysign.S: Load little-endian
	words of double from correct stack offsets.
	* sysdeps/powerpc/powerpc32/fpu/s_copysignl.S: Likewise.
	* sysdeps/powerpc/powerpc32/fpu/s_lrint.S: Likewise.
	* sysdeps/powerpc/powerpc32/fpu/s_lround.S: Likewise.
	* sysdeps/powerpc/powerpc32/power4/fpu/s_llrint.S: Likewise.
	* sysdeps/powerpc/powerpc32/power4/fpu/s_llrintf.S: Likewise.
	* sysdeps/powerpc/powerpc32/power5+/fpu/s_llround.S: Likewise.
	* sysdeps/powerpc/powerpc32/power5+/fpu/s_lround.S: Likewise.
	* sysdeps/powerpc/powerpc32/power5/fpu/s_isnan.S: Likewise.
	* sysdeps/powerpc/powerpc32/power6/fpu/s_isnan.S: Likewise.
	* sysdeps/powerpc/powerpc32/power6/fpu/s_llrint.S: Likewise.
	* sysdeps/powerpc/powerpc32/power6/fpu/s_llround.S: Likewise.
	* sysdeps/powerpc/powerpc32/power7/fpu/s_finite.S: Likewise.
	* sysdeps/powerpc/powerpc32/power7/fpu/s_isinf.S: Likewise.
	* sysdeps/powerpc/powerpc32/power7/fpu/s_isnan.S: Likewise.

diff --git a/sysdeps/powerpc/powerpc32/fpu/s_copysign.S b/sysdeps/powerpc/powerpc32/fpu/s_copysign.S
index 840891f..2dc1d24 100644
--- a/sysdeps/powerpc/powerpc32/fpu/s_copysign.S
+++ b/sysdeps/powerpc/powerpc32/fpu/s_copysign.S
@@ -29,7 +29,11 @@ ENTRY(__copysign)
 	stwu	r1,-16(r1)
 	cfi_adjust_cfa_offset (16)
 	stfd	fp2,8(r1)
+#ifdef __LITTLE_ENDIAN__
+	lwz	r3,12(r1)
+#else
 	lwz	r3,8(r1)
+#endif
 	cmpwi   r3,0
 	addi    r1,r1,16
 	cfi_adjust_cfa_offset (-16)
diff --git a/sysdeps/powerpc/powerpc32/fpu/s_copysignl.S b/sysdeps/powerpc/powerpc32/fpu/s_copysignl.S
index 4ec8389..c58d17f 100644
--- a/sysdeps/powerpc/powerpc32/fpu/s_copysignl.S
+++ b/sysdeps/powerpc/powerpc32/fpu/s_copysignl.S
@@ -30,7 +30,11 @@ ENTRY(__copysignl)
 	fmr	fp0,fp1
 	fabs	fp1,fp1
 	fcmpu	cr7,fp0,fp1
+#ifdef __LITTLE_ENDIAN__
+	lwz	r3,12(r1)
+#else
 	lwz	r3,8(r1)
+#endif
 	cmpwi	cr6,r3,0
 	addi	r1,r1,16
 	cfi_adjust_cfa_offset (-16)
diff --git a/sysdeps/powerpc/powerpc32/fpu/s_lrint.S b/sysdeps/powerpc/powerpc32/fpu/s_lrint.S
index 27881f8..7731645 100644
--- a/sysdeps/powerpc/powerpc32/fpu/s_lrint.S
+++ b/sysdeps/powerpc/powerpc32/fpu/s_lrint.S
@@ -27,7 +27,11 @@ ENTRY (__lrint)
 	nop	/* Insure the following load is in a different dispatch group */
 	nop	/* to avoid pipe stall on POWER4&5.  */
 	nop
+#ifdef __LITTLE_ENDIAN__
+	lwz	r3,8(r1)
+#else
 	lwz	r3,12(r1)
+#endif
 	addi	r1,r1,16
 	blr
 	END (__lrint)
diff --git a/sysdeps/powerpc/powerpc32/fpu/s_lround.S b/sysdeps/powerpc/powerpc32/fpu/s_lround.S
index 92dc378..0c33cb6 100644
--- a/sysdeps/powerpc/powerpc32/fpu/s_lround.S
+++ b/sysdeps/powerpc/powerpc32/fpu/s_lround.S
@@ -67,7 +67,11 @@ ENTRY (__lround)
 	nop	/* Ensure the following load is in a different dispatch  */
 	nop	/* group to avoid pipe stall on POWER4&5.  */
 	nop
+#ifdef __LITTLE_ENDIAN__
+	lwz	r3,8(r1)	/* Load return as integer.  */
+#else
 	lwz	r3,12(r1)	/* Load return as integer.  */
+#endif
 .Lout:
 	addi	r1,r1,16
 	blr
diff --git a/sysdeps/powerpc/powerpc32/power4/fpu/s_llrint.S b/sysdeps/powerpc/powerpc32/power4/fpu/s_llrint.S
index 55b2850..f02fb2b 100644
--- a/sysdeps/powerpc/powerpc32/power4/fpu/s_llrint.S
+++ b/sysdeps/powerpc/powerpc32/power4/fpu/s_llrint.S
@@ -29,8 +29,13 @@ ENTRY (__llrint)
 	nop	/* Insure the following load is in a different dispatch group */
 	nop	/* to avoid pipe stall on POWER4&5.  */
 	nop
+#ifdef __LITTLE_ENDIAN__
+	lwz	r3,12(r1)
+	lwz	r4,8(r1)
+#else
 	lwz	r3,8(r1)
 	lwz	r4,12(r1)
+#endif
 	addi	r1,r1,16
 	blr
 	END (__llrint)
diff --git a/sysdeps/powerpc/powerpc32/power4/fpu/s_llrintf.S b/sysdeps/powerpc/powerpc32/power4/fpu/s_llrintf.S
index cc80fcb..a5db9a9 100644
--- a/sysdeps/powerpc/powerpc32/power4/fpu/s_llrintf.S
+++ b/sysdeps/powerpc/powerpc32/power4/fpu/s_llrintf.S
@@ -28,8 +28,13 @@ ENTRY (__llrintf)
 	nop	/* Insure the following load is in a different dispatch group */
 	nop	/* to avoid pipe stall on POWER4&5.  */
 	nop
+#ifdef __LITTLE_ENDIAN__
+	lwz	r3,12(r1)
+	lwz	r4,8(r1)
+#else
 	lwz	r3,8(r1)
 	lwz	r4,12(r1)
+#endif
 	addi	r1,r1,16
 	blr
 	END (__llrintf)
diff --git a/sysdeps/powerpc/powerpc32/power5+/fpu/s_llround.S b/sysdeps/powerpc/powerpc32/power5+/fpu/s_llround.S
index ecd37c3..1f82687 100644
--- a/sysdeps/powerpc/powerpc32/power5+/fpu/s_llround.S
+++ b/sysdeps/powerpc/powerpc32/power5+/fpu/s_llround.S
@@ -39,8 +39,13 @@ ENTRY (__llround)
 	nop	/* Ensure the following load is in a different dispatch  */
 	nop	/* group to avoid pipe stall on POWER4&5.  */
 	nop
+#ifdef __LITTLE_ENDIAN__
+	lwz	r4,8(r1)
+	lwz	r3,12(r1)
+#else
 	lwz	r4,12(r1)
 	lwz	r3,8(r1)
+#endif
 	addi	r1,r1,16
 	blr
 	END (__llround)
diff --git a/sysdeps/powerpc/powerpc32/power5+/fpu/s_lround.S b/sysdeps/powerpc/powerpc32/power5+/fpu/s_lround.S
index d4da625..03e8bf3 100644
--- a/sysdeps/powerpc/powerpc32/power5+/fpu/s_lround.S
+++ b/sysdeps/powerpc/powerpc32/power5+/fpu/s_lround.S
@@ -38,7 +38,11 @@ ENTRY (__lround)
 	nop	/* Ensure the following load is in a different dispatch  */
 	nop	/* group to avoid pipe stall on POWER4&5.  */
 	nop
+#ifdef __LITTLE_ENDIAN__
+	lwz	r3,8(r1)
+#else
 	lwz	r3,12(r1)
+#endif
 	addi	r1,r1,16
 	blr
 	END (__lround)
diff --git a/sysdeps/powerpc/powerpc32/power5/fpu/s_isnan.S b/sysdeps/powerpc/powerpc32/power5/fpu/s_isnan.S
index f2417fd..4943d47 100644
--- a/sysdeps/powerpc/powerpc32/power5/fpu/s_isnan.S
+++ b/sysdeps/powerpc/powerpc32/power5/fpu/s_isnan.S
@@ -27,8 +27,13 @@ EALIGN (__isnan, 4, 0)
 	ori	r1,r1,0
 	stfd	fp1,24(r1)	/* copy FPR to GPR */
 	ori	r1,r1,0
+#ifdef __LITTLE_ENDIAN__
+	lwz	r4,28(r1)
+	lwz	r5,24(r1)
+#else
 	lwz	r4,24(r1)
 	lwz	r5,28(r1)
+#endif
 	lis	r0,0x7ff0	/* const long r0 0x7ff00000 00000000 */
 	clrlwi	r4,r4,1		/* x = fabs(x) */
 	cmpw	cr7,r4,r0	/* if (fabs(x) =< inf) */
diff --git a/sysdeps/powerpc/powerpc32/power6/fpu/s_isnan.S b/sysdeps/powerpc/powerpc32/power6/fpu/s_isnan.S
index 2c095db..675288c 100644
--- a/sysdeps/powerpc/powerpc32/power6/fpu/s_isnan.S
+++ b/sysdeps/powerpc/powerpc32/power6/fpu/s_isnan.S
@@ -27,8 +27,13 @@ EALIGN (__isnan, 4, 0)
 	ori	r1,r1,0
 	stfd	fp1,24(r1)	/* copy FPR to GPR */
 	ori	r1,r1,0
+#ifdef __LITTLE_ENDIAN__
+	lwz	r4,28(r1)
+	lwz	r5,24(r1)
+#else
 	lwz	r4,24(r1)
 	lwz	r5,28(r1)
+#endif
 	lis	r0,0x7ff0	/* const long r0 0x7ff00000 00000000 */
 	clrlwi	r4,r4,1		/* x = fabs(x) */
 	cmpw	cr7,r4,r0	/* if (fabs(x) =< inf) */
diff --git a/sysdeps/powerpc/powerpc32/power6/fpu/s_llrint.S b/sysdeps/powerpc/powerpc32/power6/fpu/s_llrint.S
index 3344b31..d4ed205 100644
--- a/sysdeps/powerpc/powerpc32/power6/fpu/s_llrint.S
+++ b/sysdeps/powerpc/powerpc32/power6/fpu/s_llrint.S
@@ -29,8 +29,13 @@ ENTRY (__llrint)
 /* Insure the following load is in a different dispatch group by
    inserting "group ending nop".  */
 	ori	r1,r1,0
+#ifdef __LITTLE_ENDIAN__
+	lwz	r3,12(r1)
+	lwz	r4,8(r1)
+#else
 	lwz	r3,8(r1)
 	lwz	r4,12(r1)
+#endif
 	addi	r1,r1,16
 	blr
 	END (__llrint)
diff --git a/sysdeps/powerpc/powerpc32/power6/fpu/s_llround.S b/sysdeps/powerpc/powerpc32/power6/fpu/s_llround.S
index 0ff04cb..8a6d815 100644
--- a/sysdeps/powerpc/powerpc32/power6/fpu/s_llround.S
+++ b/sysdeps/powerpc/powerpc32/power6/fpu/s_llround.S
@@ -39,8 +39,13 @@ ENTRY (__llround)
 /* Insure the following load is in a different dispatch group by
    inserting "group ending nop".  */
 	ori	r1,r1,0
+#ifdef __LITTLE_ENDIAN__
+	lwz	r4,8(r1)
+	lwz	r3,12(r1)
+#else
 	lwz	r4,12(r1)
 	lwz	r3,8(r1)
+#endif
 	addi	r1,r1,16
 	blr
 	END (__llround)
diff --git a/sysdeps/powerpc/powerpc32/power7/fpu/s_finite.S b/sysdeps/powerpc/powerpc32/power7/fpu/s_finite.S
index b2ab5bf..872fcc0 100644
--- a/sysdeps/powerpc/powerpc32/power7/fpu/s_finite.S
+++ b/sysdeps/powerpc/powerpc32/power7/fpu/s_finite.S
@@ -54,9 +54,15 @@ ENTRY (__finite)
 	stfd    fp1,8(r1)     /* Transfer FP to GPR's.  */
 
 	ori	2,2,0	      /* Force a new dispatch group.  */
+#ifdef __LITTLE_ENDIAN__
+	lhz     r0,8+6(r1)    /* Fetch the upper portion of the high word of
+			      the FP value (where the exponent and sign bits
+			      are).  */
+#else
 	lhz     r0,8(r1)      /* Fetch the upper portion of the high word of
 			      the FP value (where the exponent and sign bits
 			      are).  */
+#endif
 	clrlwi	r0,r0,17      /* r0 = abs(r0).  */
 	addi	r1,r1,16      /* Reset the stack pointer.  */
 	cmpwi	cr7,r0,0x7ff0 /* r4 == 0x7ff0?.  */
diff --git a/sysdeps/powerpc/powerpc32/power7/fpu/s_isinf.S b/sysdeps/powerpc/powerpc32/power7/fpu/s_isinf.S
index 3f8af60..6980f29 100644
--- a/sysdeps/powerpc/powerpc32/power7/fpu/s_isinf.S
+++ b/sysdeps/powerpc/powerpc32/power7/fpu/s_isinf.S
@@ -48,14 +48,20 @@ ENTRY (__isinf)
 	li	r3,0
 	bflr    29	      /* If not INF, return.  */
 
-	/* Either we have -INF/+INF or a denormal.  */
+	/* Either we have +INF or -INF.  */
 
 	stwu    r1,-16(r1)    /* Allocate stack space.  */
 	stfd    fp1,8(r1)     /* Transfer FP to GPR's.  */
 	ori	2,2,0	      /* Force a new dispatch group.  */
+#ifdef __LITTLE_ENDIAN__
+	lhz	r4,8+6(r1)    /* Fetch the upper portion of the high word of
+			      the FP value (where the exponent and sign bits
+			      are).  */
+#else
 	lhz	r4,8(r1)      /* Fetch the upper portion of the high word of
 			      the FP value (where the exponent and sign bits
 			      are).  */
+#endif
 	addi	r1,r1,16      /* Reset the stack pointer.  */
 	cmpwi	cr7,r4,0x7ff0 /* r4 == 0x7ff0?  */
 	li	r3,1
diff --git a/sysdeps/powerpc/powerpc32/power7/fpu/s_isnan.S b/sysdeps/powerpc/powerpc32/power7/fpu/s_isnan.S
index 99ff126..ae5d190 100644
--- a/sysdeps/powerpc/powerpc32/power7/fpu/s_isnan.S
+++ b/sysdeps/powerpc/powerpc32/power7/fpu/s_isnan.S
@@ -53,8 +53,13 @@ ENTRY (__isnan)
 	stwu	r1,-16(r1)    /* Allocate stack space.  */
 	stfd	fp1,8(r1)     /* Transfer FP to GPR's.  */
 	ori	2,2,0	      /* Force a new dispatch group.  */
+#ifdef __LITTLE_ENDIAN__
+	lwz     r4,12(r1)     /* Load the upper half of the FP value.  */
+	lwz     r5,8(r1)      /* Load the lower half of the FP value.  */
+#else
 	lwz     r4,8(r1)      /* Load the upper half of the FP value.  */
 	lwz     r5,12(r1)     /* Load the lower half of the FP value.  */
+#endif
 	addi	r1,r1,16      /* Reset the stack pointer.  */
 	lis     r0,0x7ff0     /* Load the upper portion for an INF/NaN.  */
 	clrlwi  r4,r4,1	      /* r4 = abs(r4).  */

-- 
Alan Modra
Australia Development Lab, IBM

