This is the mail archive of the libc-alpha@sourceware.org mailing list for the glibc project.


[PATCH] Move sysdeps/x86_64/cacheinfo.c to sysdeps/x86


Hi,

i386 and x86_64 should share the same cacheinfo.c in sysdeps/x86.  This
patch moves sysdeps/x86_64/cacheinfo.c to sysdeps/x86, renames the
__x86_64_* cache-size variables to __x86_*, and replaces the i386-only
wrapper with per-file CPPFLAGS in sysdeps/i386/i686/Makefile.  Tested on
i686 and x86-64.  OK to install?
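
For context, here is a minimal C sketch (illustrative only, not glibc
code) of how the cache-size knobs renamed by this patch are meant to be
consulted.  The real consumers are the x86_64 assembly routines updated
below; data_cache_size_half and shared_cache_size_half are stand-ins for
__x86_data_cache_size_half and __x86_shared_cache_size_half, which
init_cacheinfo fills in from CPUID at startup:

#include <stddef.h>
#include <stdio.h>

/* Stand-ins for the __x86_* variables; init_cacheinfo computes the real
   values from CPUID.  Defaults mirror the ones in cacheinfo.c.  */
static long int data_cache_size_half = 32 * 1024 / 2;
static long int shared_cache_size_half = 1024 * 1024 / 2;

enum copy_strategy { COPY_PLAIN, COPY_PREFETCH, COPY_NONTEMPORAL };

static enum copy_strategy
choose_copy_strategy (size_t n)
{
  if ((long int) n <= data_cache_size_half)
    return COPY_PLAIN;		/* Fits in half of L1d: simple copy loop.  */
  if ((long int) n <= shared_cache_size_half)
    return COPY_PREFETCH;	/* Fits in half of L2/L3: copy with prefetch.  */
  return COPY_NONTEMPORAL;	/* Larger: non-temporal stores spare the caches.  */
}

int
main (void)
{
  printf ("64 bytes -> %d\n", choose_copy_strategy (64));
  printf ("100 KiB  -> %d\n", choose_copy_strategy (100 * 1024));
  printf ("8 MiB    -> %d\n", choose_copy_strategy (8 * 1024 * 1024));
  return 0;
}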

Thanks.


H.J.
---
 sysdeps/i386/i686/Makefile                   |  1 +
 sysdeps/i386/i686/cacheinfo.c                | 13 -------
 sysdeps/{x86_64 => x86}/cacheinfo.c          | 56 ++++++++++++++--------------
 sysdeps/x86_64/memcpy.S                      |  6 +--
 sysdeps/x86_64/memset.S                      |  6 +--
 sysdeps/x86_64/multiarch/memcmp-sse4.S       |  4 +-
 sysdeps/x86_64/multiarch/memcpy-ssse3-back.S | 10 ++---
 sysdeps/x86_64/multiarch/memcpy-ssse3.S      | 12 +++---
 9 files changed, 75 insertions(+), 60 deletions(-)
 create mode 100644 ChangeLog.cacheinfo
 delete mode 100644 sysdeps/i386/i686/cacheinfo.c
 rename sysdeps/{x86_64 => x86}/cacheinfo.c (92%)

2013-01-03  H.J. Lu  <hongjiu.lu@intel.com>

	* sysdeps/i386/i686/Makefile (CPPFLAGS-cacheinfo.c): New macro.
	* sysdeps/i386/i686/cacheinfo.c: Removed.
	* sysdeps/x86_64/cacheinfo.c: Moved to ...
	* sysdeps/x86/cacheinfo.c: Here.
	(__x86_64_data_cache_size): Renamed to ...
	(__x86_data_cache_size): This.
	(__x86_64_raw_data_cache_size): Renamed to ...
	(__x86_raw_data_cache_size): This.
	(__x86_64_data_cache_size_half): Renamed to ...
	(__x86_data_cache_size_half): This.
	(__x86_64_raw_data_cache_size_half): Renamed to ...
	(__x86_raw_data_cache_size_half): This.
	(__x86_64_shared_cache_size): Renamed to ...
	(__x86_shared_cache_size): This.
	(__x86_64_raw_shared_cache_size): Renamed to ...
	(__x86_raw_shared_cache_size): This.
	(__x86_64_shared_cache_size_half): Renamed to ...
	(__x86_shared_cache_size_half): This.
	(__x86_64_raw_shared_cache_size_half): Renamed to ...
	(__x86_raw_shared_cache_size_half): This.
	* sysdeps/x86_64/memcpy.S: Updated.
	* sysdeps/x86_64/memset.S: Likewise.
	* sysdeps/x86_64/multiarch/memcmp-sse4.S: Likewise.
	* sysdeps/x86_64/multiarch/memcpy-ssse3-back.S: Likewise.
	* sysdeps/x86_64/multiarch/memcpy-ssse3.S: Likewise.
diff --git a/sysdeps/i386/i686/Makefile b/sysdeps/i386/i686/Makefile
index e6b2924..e087020 100644
--- a/sysdeps/i386/i686/Makefile
+++ b/sysdeps/i386/i686/Makefile
@@ -8,6 +8,7 @@ stack-align-test-flags += -msse
 
 ifeq ($(subdir),string)
 sysdep_routines += cacheinfo
+CPPFLAGS-cacheinfo.c = -DDISABLE_PREFETCHW -DDISABLE_PREFERRED_MEMORY_INSTRUCTION
 endif
 
 ifeq (yes,$(config-asflags-i686))
diff --git a/sysdeps/i386/i686/cacheinfo.c b/sysdeps/i386/i686/cacheinfo.c
deleted file mode 100644
index 3635961..0000000
--- a/sysdeps/i386/i686/cacheinfo.c
+++ /dev/null
@@ -1,13 +0,0 @@
-#define __x86_64_data_cache_size __x86_data_cache_size
-#define __x86_64_raw_data_cache_size __x86_raw_data_cache_size
-#define __x86_64_data_cache_size_half __x86_data_cache_size_half
-#define __x86_64_raw_data_cache_size_half __x86_raw_data_cache_size_half
-#define __x86_64_shared_cache_size __x86_shared_cache_size
-#define __x86_64_raw_shared_cache_size __x86_raw_shared_cache_size
-#define __x86_64_shared_cache_size_half __x86_shared_cache_size_half
-#define __x86_64_raw_shared_cache_size_half __x86_raw_shared_cache_size_half
-
-#define DISABLE_PREFETCHW
-#define DISABLE_PREFERRED_MEMORY_INSTRUCTION
-
-#include <sysdeps/x86_64/cacheinfo.c>
diff --git a/sysdeps/x86_64/cacheinfo.c b/sysdeps/x86/cacheinfo.c
similarity index 92%
rename from sysdeps/x86_64/cacheinfo.c
rename to sysdeps/x86/cacheinfo.c
index 60981ca..5a619c5 100644
--- a/sysdeps/x86_64/cacheinfo.c
+++ b/sysdeps/x86/cacheinfo.c
@@ -1,4 +1,4 @@
-/* x86_64 cache info.
+/* x86 cache info.
    Copyright (C) 2003-2013 Free Software Foundation, Inc.
    This file is part of the GNU C Library.
 
@@ -505,24 +505,24 @@ __cache_sysconf (int name)
 
 /* Data cache size for use in memory and string routines, typically
    L1 size, rounded to multiple of 256 bytes.  */
-long int __x86_64_data_cache_size_half attribute_hidden = 32 * 1024 / 2;
-long int __x86_64_data_cache_size attribute_hidden = 32 * 1024;
-/* Similar to __x86_64_data_cache_size_half, but not rounded.  */
-long int __x86_64_raw_data_cache_size_half attribute_hidden = 32 * 1024 / 2;
-/* Similar to __x86_64_data_cache_size, but not rounded.  */
-long int __x86_64_raw_data_cache_size attribute_hidden = 32 * 1024;
+long int __x86_data_cache_size_half attribute_hidden = 32 * 1024 / 2;
+long int __x86_data_cache_size attribute_hidden = 32 * 1024;
+/* Similar to __x86_data_cache_size_half, but not rounded.  */
+long int __x86_raw_data_cache_size_half attribute_hidden = 32 * 1024 / 2;
+/* Similar to __x86_data_cache_size, but not rounded.  */
+long int __x86_raw_data_cache_size attribute_hidden = 32 * 1024;
 /* Shared cache size for use in memory and string routines, typically
    L2 or L3 size, rounded to multiple of 256 bytes.  */
-long int __x86_64_shared_cache_size_half attribute_hidden = 1024 * 1024 / 2;
-long int __x86_64_shared_cache_size attribute_hidden = 1024 * 1024;
-/* Similar to __x86_64_shared_cache_size_half, but not rounded.  */
-long int __x86_64_raw_shared_cache_size_half attribute_hidden = 1024 * 1024 / 2;
-/* Similar to __x86_64_shared_cache_size, but not rounded.  */
-long int __x86_64_raw_shared_cache_size attribute_hidden = 1024 * 1024;
+long int __x86_shared_cache_size_half attribute_hidden = 1024 * 1024 / 2;
+long int __x86_shared_cache_size attribute_hidden = 1024 * 1024;
+/* Similar to __x86_shared_cache_size_half, but not rounded.  */
+long int __x86_raw_shared_cache_size_half attribute_hidden = 1024 * 1024 / 2;
+/* Similar to __x86_shared_cache_size, but not rounded.  */
+long int __x86_raw_shared_cache_size attribute_hidden = 1024 * 1024;
 
 #ifndef DISABLE_PREFETCHW
 /* PREFETCHW support flag for use in memory and string routines.  */
-int __x86_64_prefetchw attribute_hidden;
+int __x86_prefetchw attribute_hidden;
 #endif
 
 #ifndef DISABLE_PREFERRED_MEMORY_INSTRUCTION
@@ -534,7 +534,7 @@ int __x86_64_prefetchw attribute_hidden;
   3: SSSE3 instructions
 
   */
-int __x86_64_preferred_memory_instruction attribute_hidden;
+int __x86_preferred_memory_instruction attribute_hidden;
 #endif
 
 
@@ -591,9 +591,9 @@ init_cacheinfo (void)
       /* Intel prefers SSSE3 instructions for memory/string routines
 	 if they are available.  */
       if ((ecx & 0x200))
-	__x86_64_preferred_memory_instruction = 3;
+	__x86_preferred_memory_instruction = 3;
       else
-	__x86_64_preferred_memory_instruction = 2;
+	__x86_preferred_memory_instruction = 2;
 #endif
 
       /* Figure out the number of logical threads that share the
@@ -684,9 +684,9 @@ init_cacheinfo (void)
 	 if they are avaiable, otherwise it prefers integer
 	 instructions.  */
       if ((ecx & 0x200))
-	__x86_64_preferred_memory_instruction = 3;
+	__x86_preferred_memory_instruction = 3;
       else
-	__x86_64_preferred_memory_instruction = 0;
+	__x86_preferred_memory_instruction = 0;
 #endif
 
       /* Get maximum extended function. */
@@ -730,28 +730,28 @@ init_cacheinfo (void)
 	  __cpuid (0x80000001, eax, ebx, ecx, edx);
 	  /*  PREFETCHW     || 3DNow!  */
 	  if ((ecx & 0x100) || (edx & 0x80000000))
-	    __x86_64_prefetchw = -1;
+	    __x86_prefetchw = -1;
 	}
 #endif
     }
 
   if (data > 0)
     {
-      __x86_64_raw_data_cache_size_half = data / 2;
-      __x86_64_raw_data_cache_size = data;
+      __x86_raw_data_cache_size_half = data / 2;
+      __x86_raw_data_cache_size = data;
       /* Round data cache size to multiple of 256 bytes.  */
       data = data & ~255L;
-      __x86_64_data_cache_size_half = data / 2;
-      __x86_64_data_cache_size = data;
+      __x86_data_cache_size_half = data / 2;
+      __x86_data_cache_size = data;
     }
 
   if (shared > 0)
     {
-      __x86_64_raw_shared_cache_size_half = shared / 2;
-      __x86_64_raw_shared_cache_size = shared;
+      __x86_raw_shared_cache_size_half = shared / 2;
+      __x86_raw_shared_cache_size = shared;
       /* Round shared cache size to multiple of 256 bytes.  */
       shared = shared & ~255L;
-      __x86_64_shared_cache_size_half = shared / 2;
-      __x86_64_shared_cache_size = shared;
+      __x86_shared_cache_size_half = shared / 2;
+      __x86_shared_cache_size = shared;
     }
 }
diff --git a/sysdeps/x86_64/memcpy.S b/sysdeps/x86_64/memcpy.S
index 5e4dbc7..d6cd553 100644
--- a/sysdeps/x86_64/memcpy.S
+++ b/sysdeps/x86_64/memcpy.S
@@ -254,7 +254,7 @@ L(32after):
 
 L(fasttry):				/* first 1/2 L1 */
 #ifndef NOT_IN_libc			/* only up to this algorithm outside of libc.so */
-	mov	__x86_64_data_cache_size_half(%rip), %R11_LP
+	mov	__x86_data_cache_size_half(%rip), %R11_LP
 	cmpq	%rdx, %r11		/* calculate the smaller of */
 	cmovaq	%rdx, %r11		/* remaining bytes and 1/2 L1 */
 #endif
@@ -303,7 +303,7 @@ L(fastafter):
 /* Handle large blocks smaller than 1/2 L2. */
 
 L(pretry):				/* first 1/2 L2 */
-	mov	__x86_64_shared_cache_size_half (%rip), %R8_LP
+	mov	__x86_shared_cache_size_half (%rip), %R8_LP
 	cmpq	%rdx, %r8		/* calculate the lesser of */
 	cmovaq	%rdx, %r8		/* remaining bytes and 1/2 L2 */
 
@@ -322,7 +322,7 @@ L(pre):					/* 64-byte with prefetching */
 	movq	%rbx, SAVE3(%rsp)
 	cfi_rel_offset (%rbx, SAVE3)
 
-	cmpl	$0, __x86_64_prefetchw(%rip)
+	cmpl	$0, __x86_prefetchw(%rip)
 	jz	L(preloop)		/* check if PREFETCHW OK */
 
 	.p2align 4
diff --git a/sysdeps/x86_64/memset.S b/sysdeps/x86_64/memset.S
index e0d4dfa..f3a4d44 100644
--- a/sysdeps/x86_64/memset.S
+++ b/sysdeps/x86_64/memset.S
@@ -862,7 +862,7 @@ L(SSE15Q0): mov    %rdx,-0xf(%rdi)
 	.balign     16
 L(byte32sse2_pre):
 
-	mov    __x86_64_shared_cache_size(%rip),%r9d  # The largest cache size
+	mov    __x86_shared_cache_size(%rip),%r9d  # The largest cache size
 	cmp    %r9,%r8
 	ja     L(sse2_nt_move_pre)
 	#jmp    L(byte32sse2)
@@ -1205,7 +1205,7 @@ L(SSExDx):
 #ifndef USE_MULTIARCH
 L(aligned_now):
 
-	 cmpl   $0x1,__x86_64_preferred_memory_instruction(%rip)
+	 cmpl   $0x1,__x86_preferred_memory_instruction(%rip)
 	 jg     L(SSE_pre)
 #endif /* USE_MULTIARCH */
 
@@ -1262,7 +1262,7 @@ L(8byte_move_skip):
 
 	.balign     16
 L(8byte_stos_try):
-	mov    __x86_64_shared_cache_size(%rip),%r9d // ck largest cache size
+	mov    __x86_shared_cache_size(%rip),%r9d // ck largest cache size
 	cmpq	%r8,%r9		// calculate the lesser of remaining
 	cmovaq	%r8,%r9		// bytes and largest cache size
 	jbe	L(8byte_stos)
diff --git a/sysdeps/x86_64/multiarch/memcmp-sse4.S b/sysdeps/x86_64/multiarch/memcmp-sse4.S
index 08eade9..1ed4200 100644
--- a/sysdeps/x86_64/multiarch/memcmp-sse4.S
+++ b/sysdeps/x86_64/multiarch/memcmp-sse4.S
@@ -321,7 +321,7 @@ L(512bytesormore):
 # ifdef DATA_CACHE_SIZE_HALF
 	mov	$DATA_CACHE_SIZE_HALF, %R8_LP
 # else
-	mov	__x86_64_data_cache_size_half(%rip), %R8_LP
+	mov	__x86_data_cache_size_half(%rip), %R8_LP
 # endif
 	mov	%r8, %r9
 	shr	$1, %r8
@@ -637,7 +637,7 @@ L(512bytesormorein2aligned):
 # ifdef DATA_CACHE_SIZE_HALF
 	mov	$DATA_CACHE_SIZE_HALF, %R8_LP
 # else
-	mov	__x86_64_data_cache_size_half(%rip), %R8_LP
+	mov	__x86_data_cache_size_half(%rip), %R8_LP
 # endif
 	mov	%r8, %r9
 	shr	$1, %r8
diff --git a/sysdeps/x86_64/multiarch/memcpy-ssse3-back.S b/sysdeps/x86_64/multiarch/memcpy-ssse3-back.S
index 03e18b3..fc9fcef 100644
--- a/sysdeps/x86_64/multiarch/memcpy-ssse3-back.S
+++ b/sysdeps/x86_64/multiarch/memcpy-ssse3-back.S
@@ -108,7 +108,7 @@ L(144bytesormore):
 #ifdef DATA_CACHE_SIZE
 	mov	$DATA_CACHE_SIZE, %RCX_LP
 #else
-	mov	__x86_64_data_cache_size(%rip), %RCX_LP
+	mov	__x86_data_cache_size(%rip), %RCX_LP
 #endif
 	cmp	%rcx, %rdx
 	jae	L(gobble_mem_fwd)
@@ -124,7 +124,7 @@ L(copy_backward):
 #ifdef DATA_CACHE_SIZE
 	mov	$DATA_CACHE_SIZE, %RCX_LP
 #else
-	mov	__x86_64_data_cache_size(%rip), %RCX_LP
+	mov	__x86_data_cache_size(%rip), %RCX_LP
 #endif
 	shl	$1, %rcx
 	cmp	%rcx, %rdx
@@ -158,7 +158,7 @@ L(shl_0):
 #ifdef DATA_CACHE_SIZE
 	cmp	$DATA_CACHE_SIZE_HALF, %R9_LP
 #else
-	cmp	__x86_64_data_cache_size_half(%rip), %R9_LP
+	cmp	__x86_data_cache_size_half(%rip), %R9_LP
 #endif
 	jae	L(gobble_mem_fwd)
 	sub	$0x80, %rdx
@@ -1480,7 +1480,7 @@ L(gobble_mem_fwd):
 #ifdef SHARED_CACHE_SIZE_HALF
 	mov	$SHARED_CACHE_SIZE_HALF, %RCX_LP
 #else
-	mov	__x86_64_shared_cache_size_half(%rip), %RCX_LP
+	mov	__x86_shared_cache_size_half(%rip), %RCX_LP
 #endif
 #ifdef USE_AS_MEMMOVE
 	mov	%rsi, %r9
@@ -1587,7 +1587,7 @@ L(gobble_mem_bwd):
 #ifdef SHARED_CACHE_SIZE_HALF
 	mov	$SHARED_CACHE_SIZE_HALF, %RCX_LP
 #else
-	mov	__x86_64_shared_cache_size_half(%rip), %RCX_LP
+	mov	__x86_shared_cache_size_half(%rip), %RCX_LP
 #endif
 #ifdef USE_AS_MEMMOVE
 	mov	%rdi, %r9
diff --git a/sysdeps/x86_64/multiarch/memcpy-ssse3.S b/sysdeps/x86_64/multiarch/memcpy-ssse3.S
index 4be7e7b..9642cee 100644
--- a/sysdeps/x86_64/multiarch/memcpy-ssse3.S
+++ b/sysdeps/x86_64/multiarch/memcpy-ssse3.S
@@ -99,7 +99,7 @@ L(80bytesormore):
 #ifdef SHARED_CACHE_SIZE_HALF
 	mov	$SHARED_CACHE_SIZE_HALF, %RCX_LP
 #else
-	mov	__x86_64_shared_cache_size_half(%rip), %RCX_LP
+	mov	__x86_shared_cache_size_half(%rip), %RCX_LP
 #endif
 	cmp	%rcx, %rdx
 	mov	%rsi, %r9
@@ -109,7 +109,7 @@ L(80bytesormore):
 #ifdef DATA_CACHE_SIZE_HALF
 	mov	$DATA_CACHE_SIZE_HALF, %RCX_LP
 #else
-	mov	__x86_64_data_cache_size_half(%rip), %RCX_LP
+	mov	__x86_data_cache_size_half(%rip), %RCX_LP
 #endif
 	BRANCH_TO_JMPTBL_ENTRY (L(shl_table), %r9, 4)
 
@@ -129,7 +129,7 @@ L(copy_backward):
 #ifdef SHARED_CACHE_SIZE_HALF
 	mov	$SHARED_CACHE_SIZE_HALF, %RCX_LP
 #else
-	mov	__x86_64_shared_cache_size_half(%rip), %RCX_LP
+	mov	__x86_shared_cache_size_half(%rip), %RCX_LP
 #endif
 
 	cmp	%rcx, %rdx
@@ -140,7 +140,7 @@ L(copy_backward):
 #ifdef DATA_CACHE_SIZE_HALF
 	mov	$DATA_CACHE_SIZE_HALF, %RCX_LP
 #else
-	mov	__x86_64_data_cache_size_half(%rip), %RCX_LP
+	mov	__x86_data_cache_size_half(%rip), %RCX_LP
 #endif
 	BRANCH_TO_JMPTBL_ENTRY (L(shl_table_bwd), %r9, 4)
 
@@ -177,7 +177,7 @@ L(shl_0_gobble):
 #ifdef DATA_CACHE_SIZE_HALF
 	cmp	$DATA_CACHE_SIZE_HALF, %RDX_LP
 #else
-	cmp	__x86_64_data_cache_size_half(%rip), %RDX_LP
+	cmp	__x86_data_cache_size_half(%rip), %RDX_LP
 #endif
 	lea	-128(%rdx), %rdx
 	jae	L(shl_0_gobble_mem_loop)
@@ -318,7 +318,7 @@ L(shl_0_gobble_bwd):
 #ifdef DATA_CACHE_SIZE_HALF
 	cmp	$DATA_CACHE_SIZE_HALF, %RDX_LP
 #else
-	cmp	__x86_64_data_cache_size_half(%rip), %RDX_LP
+	cmp	__x86_data_cache_size_half(%rip), %RDX_LP
 #endif
 	lea	-128(%rdx), %rdx
 	jae	L(shl_0_gobble_mem_bwd_loop)
-- 
1.7.11.7

