1 glibc headers define both __LITTLE_ENDIAN and __BIG_ENDIAN
2 which was tripping the check in linux/byteorder.h. Let's
3 just stay out of userspace's way and use __KERN_{endian} instead.
5 The old implementation got away with it as it kept the two
6 implementations in different headers and conditionally included
7 the right one. The combined header does checks within each
8 function body and depends on only one being defined.
10 Converted the arches in mainline that have already moved to the
11 new header; as the other arches merge, they will need similar conversions.
14 Signed-off-by: Harvey Harrison <harvey.harrison@gmail.com>
16 arch/avr32/include/asm/byteorder.h | 2 +-
17 arch/mips/include/asm/byteorder.h | 4 +-
18 arch/sh/include/asm/byteorder.h | 4 +-
19 arch/sparc/include/asm/byteorder.h | 2 +-
20 include/linux/byteorder.h | 84 ++++++++++++++++++------------------
21 5 files changed, 48 insertions(+), 48 deletions(-)
23 diff --git a/arch/avr32/include/asm/byteorder.h b/arch/avr32/include/asm/byteorder.h
24 index 8e3af02..b7d6dd1 100644
25 --- a/arch/avr32/include/asm/byteorder.h
26 +++ b/arch/avr32/include/asm/byteorder.h
28 #include <asm/types.h>
29 #include <linux/compiler.h>
32 +#define __KERN_BIG_ENDIAN
33 #define __SWAB_64_THRU_32__
36 diff --git a/arch/mips/include/asm/byteorder.h b/arch/mips/include/asm/byteorder.h
37 index 2988d29..8ad8a5b 100644
38 --- a/arch/mips/include/asm/byteorder.h
39 +++ b/arch/mips/include/asm/byteorder.h
41 #include <asm/types.h>
43 #if defined(__MIPSEB__)
44 -# define __BIG_ENDIAN
45 +# define __KERN_BIG_ENDIAN
46 #elif defined(__MIPSEL__)
47 -# define __LITTLE_ENDIAN
48 +# define __KERN_LITTLE_ENDIAN
50 # error "MIPS, but neither __MIPSEB__, nor __MIPSEL__???"
52 diff --git a/arch/sh/include/asm/byteorder.h b/arch/sh/include/asm/byteorder.h
53 index f5fa065..4aa5f1d 100644
54 --- a/arch/sh/include/asm/byteorder.h
55 +++ b/arch/sh/include/asm/byteorder.h
57 #include <linux/types.h>
59 #ifdef __LITTLE_ENDIAN__
60 -# define __LITTLE_ENDIAN
61 +# define __KERN_LITTLE_ENDIAN
63 -# define __BIG_ENDIAN
64 +# define __KERN_BIG_ENDIAN
67 #define __SWAB_64_THRU_32__
68 diff --git a/arch/sparc/include/asm/byteorder.h b/arch/sparc/include/asm/byteorder.h
69 index 5a70f13..5b8347e 100644
70 --- a/arch/sparc/include/asm/byteorder.h
71 +++ b/arch/sparc/include/asm/byteorder.h
73 #include <asm/types.h>
77 +#define __KERN_BIG_ENDIAN
80 #define __SWAB_64_THRU_32__
81 diff --git a/include/linux/byteorder.h b/include/linux/byteorder.h
82 index 29f002d..3599fbc 100644
83 --- a/include/linux/byteorder.h
84 +++ b/include/linux/byteorder.h
86 #include <linux/types.h>
87 #include <linux/swab.h>
89 -#if defined(__LITTLE_ENDIAN) && defined(__BIG_ENDIAN)
90 +#if defined(__KERN_LITTLE_ENDIAN) && defined(__KERN_BIG_ENDIAN)
91 # error Fix asm/byteorder.h to define one endianness
94 -#if !defined(__LITTLE_ENDIAN) && !defined(__BIG_ENDIAN)
95 +#if !defined(__KERN_LITTLE_ENDIAN) && !defined(__KERN_BIG_ENDIAN)
96 # error Fix asm/byteorder.h to define arch endianness
99 -#ifdef __LITTLE_ENDIAN
100 -# undef __LITTLE_ENDIAN
101 -# define __LITTLE_ENDIAN 1234
102 +#ifdef __KERN_LITTLE_ENDIAN
103 +# ifndef __LITTLE_ENDIAN
104 +# define __LITTLE_ENDIAN 1234
106 +# ifndef __LITTLE_ENDIAN_BITFIELD
107 +# define __LITTLE_ENDIAN_BITFIELD
112 -# undef __BIG_ENDIAN
113 -# define __BIG_ENDIAN 4321
114 +#ifdef __KERN_BIG_ENDIAN
115 +# ifndef __BIG_ENDIAN
116 +# define __BIG_ENDIAN 4321
118 +# ifndef __BIG_ENDIAN_BITFIELD
119 +# define __BIG_ENDIAN_BITFIELD
123 -#if defined(__LITTLE_ENDIAN) && !defined(__LITTLE_ENDIAN_BITFIELD)
124 -# define __LITTLE_ENDIAN_BITFIELD
127 -#if defined(__BIG_ENDIAN) && !defined(__BIG_ENDIAN_BITFIELD)
128 -# define __BIG_ENDIAN_BITFIELD
131 -#ifdef __LITTLE_ENDIAN
132 +#ifdef __KERN_LITTLE_ENDIAN
133 # define __le16_to_cpu(x) ((__force __u16)(__le16)(x))
134 # define __le32_to_cpu(x) ((__force __u32)(__le32)(x))
135 # define __le64_to_cpu(x) ((__force __u64)(__le64)(x))
137 # define __cpu_to_be64(x) ((__force __be64)__swab64(x))
141 +#ifdef __KERN_BIG_ENDIAN
142 # define __be16_to_cpu(x) ((__force __u16)(__be16)(x))
143 # define __be32_to_cpu(x) ((__force __u32)(__be32)(x))
144 # define __be64_to_cpu(x) ((__force __u64)(__be64)(x))
147 static inline void __le16_to_cpus(__u16 *p)
150 +#ifdef __KERN_BIG_ENDIAN
155 static inline void __cpu_to_le16s(__u16 *p)
158 +#ifdef __KERN_BIG_ENDIAN
163 static inline void __le32_to_cpus(__u32 *p)
166 +#ifdef __KERN_BIG_ENDIAN
171 static inline void __cpu_to_le32s(__u32 *p)
174 +#ifdef __KERN_BIG_ENDIAN
179 static inline void __le64_to_cpus(__u64 *p)
182 +#ifdef __KERN_BIG_ENDIAN
187 static inline void __cpu_to_le64s(__u64 *p)
190 +#ifdef __KERN_BIG_ENDIAN
195 static inline void __be16_to_cpus(__u16 *p)
197 -#ifdef __LITTLE_ENDIAN
198 +#ifdef __KERN_LITTLE_ENDIAN
203 static inline void __cpu_to_be16s(__u16 *p)
205 -#ifdef __LITTLE_ENDIAN
206 +#ifdef __KERN_LITTLE_ENDIAN
211 static inline void __be32_to_cpus(__u32 *p)
213 -#ifdef __LITTLE_ENDIAN
214 +#ifdef __KERN_LITTLE_ENDIAN
219 static inline void __cpu_to_be32s(__u32 *p)
221 -#ifdef __LITTLE_ENDIAN
222 +#ifdef __KERN_LITTLE_ENDIAN
227 static inline void __be64_to_cpus(__u64 *p)
229 -#ifdef __LITTLE_ENDIAN
230 +#ifdef __KERN_LITTLE_ENDIAN
235 static inline void __cpu_to_be64s(__u64 *p)
237 -#ifdef __LITTLE_ENDIAN
238 +#ifdef __KERN_LITTLE_ENDIAN
243 static inline __u16 __le16_to_cpup(const __le16 *p)
245 -#ifdef __LITTLE_ENDIAN
246 +#ifdef __KERN_LITTLE_ENDIAN
247 return (__force __u16)*p;
249 return __swab16p((__force __u16 *)p);
250 @@ -180,7 +180,7 @@ static inline __u16 __le16_to_cpup(const __le16 *p)
252 static inline __u32 __le32_to_cpup(const __le32 *p)
254 -#ifdef __LITTLE_ENDIAN
255 +#ifdef __KERN_LITTLE_ENDIAN
256 return (__force __u32)*p;
258 return __swab32p((__force __u32 *)p);
259 @@ -189,7 +189,7 @@ static inline __u32 __le32_to_cpup(const __le32 *p)
261 static inline __u64 __le64_to_cpup(const __le64 *p)
263 -#ifdef __LITTLE_ENDIAN
264 +#ifdef __KERN_LITTLE_ENDIAN
265 return (__force __u64)*p;
267 return __swab64p((__force __u64 *)p);
268 @@ -198,7 +198,7 @@ static inline __u64 __le64_to_cpup(const __le64 *p)
270 static inline __le16 __cpu_to_le16p(const __u16 *p)
272 -#ifdef __LITTLE_ENDIAN
273 +#ifdef __KERN_LITTLE_ENDIAN
274 return (__force __le16)*p;
276 return (__force __le16)__swab16p(p);
277 @@ -207,7 +207,7 @@ static inline __le16 __cpu_to_le16p(const __u16 *p)
279 static inline __le32 __cpu_to_le32p(const __u32 *p)
281 -#ifdef __LITTLE_ENDIAN
282 +#ifdef __KERN_LITTLE_ENDIAN
283 return (__force __le32)*p;
285 return (__force __le32)__swab32p(p);
286 @@ -216,7 +216,7 @@ static inline __le32 __cpu_to_le32p(const __u32 *p)
288 static inline __le64 __cpu_to_le64p(const __u64 *p)
290 -#ifdef __LITTLE_ENDIAN
291 +#ifdef __KERN_LITTLE_ENDIAN
292 return (__force __le64)*p;
294 return (__force __le64)__swab64p(p);
295 @@ -225,7 +225,7 @@ static inline __le64 __cpu_to_le64p(const __u64 *p)
297 static inline __u16 __be16_to_cpup(const __be16 *p)
300 +#ifdef __KERN_BIG_ENDIAN
301 return (__force __u16)*p;
303 return __swab16p((__force __u16 *)p);
304 @@ -234,7 +234,7 @@ static inline __u16 __be16_to_cpup(const __be16 *p)
306 static inline __u32 __be32_to_cpup(const __be32 *p)
309 +#ifdef __KERN_BIG_ENDIAN
310 return (__force __u32)*p;
312 return __swab32p((__force __u32 *)p);
313 @@ -243,7 +243,7 @@ static inline __u32 __be32_to_cpup(const __be32 *p)
315 static inline __u64 __be64_to_cpup(const __be64 *p)
318 +#ifdef __KERN_BIG_ENDIAN
319 return (__force __u64)*p;
321 return __swab64p((__force __u64 *)p);
322 @@ -252,7 +252,7 @@ static inline __u64 __be64_to_cpup(const __be64 *p)
324 static inline __be16 __cpu_to_be16p(const __u16 *p)
327 +#ifdef __KERN_BIG_ENDIAN
328 return (__force __be16)*p;
330 return (__force __be16)__swab16p(p);
331 @@ -261,7 +261,7 @@ static inline __be16 __cpu_to_be16p(const __u16 *p)
333 static inline __be32 __cpu_to_be32p(const __u32 *p)
336 +#ifdef __KERN_BIG_ENDIAN
337 return (__force __be32)*p;
339 return (__force __be32)__swab32p(p);
340 @@ -270,7 +270,7 @@ static inline __be32 __cpu_to_be32p(const __u32 *p)
342 static inline __be64 __cpu_to_be64p(const __u64 *p)
345 +#ifdef __KERN_BIG_ENDIAN
346 return (__force __be64)*p;
348 return (__force __be64)__swab64p(p);