1 glibc headers define both __LITTLE_ENDIAN and __BIG_ENDIAN
2 which was tripping the check in linux/byteorder.h. Let's
3 just stay out of userspace's way and use __KERN_{endian} instead.
5 The old implementation got away with it as it kept the two
6 implementations in different headers and conditionally included
7 the right one. The combined header does checks within each
8 function body and depends on only one being defined.
10 Converted the arches in mainline that have already moved to the
11 new header; as the other arches are merged they will need a similar conversion.
14 Signed-off-by: Harvey Harrison <harvey.harrison@gmail.com>
16 arch/avr32/include/asm/byteorder.h | 2 +-
17 arch/mips/include/asm/byteorder.h | 4 +-
18 arch/sh/include/asm/byteorder.h | 4 +-
19 arch/sparc/include/asm/byteorder.h | 2 +-
20 include/linux/byteorder.h | 84 ++++++++++++++++++------------------
21 5 files changed, 48 insertions(+), 48 deletions(-)
23 --- a/arch/avr32/include/asm/byteorder.h
24 +++ b/arch/avr32/include/asm/byteorder.h
26 #include <asm/types.h>
27 #include <linux/compiler.h>
30 +#define __KERN_BIG_ENDIAN
31 #define __SWAB_64_THRU_32__
34 --- a/arch/mips/include/asm/byteorder.h
35 +++ b/arch/mips/include/asm/byteorder.h
37 #include <asm/types.h>
39 #if defined(__MIPSEB__)
40 -# define __BIG_ENDIAN
41 +# define __KERN_BIG_ENDIAN
42 #elif defined(__MIPSEL__)
43 -# define __LITTLE_ENDIAN
44 +# define __KERN_LITTLE_ENDIAN
46 # error "MIPS, but neither __MIPSEB__, nor __MIPSEL__???"
48 --- a/arch/sh/include/asm/byteorder.h
49 +++ b/arch/sh/include/asm/byteorder.h
51 #include <linux/types.h>
53 #ifdef __LITTLE_ENDIAN__
54 -# define __LITTLE_ENDIAN
55 +# define __KERN_LITTLE_ENDIAN
57 -# define __BIG_ENDIAN
58 +# define __KERN_BIG_ENDIAN
61 #define __SWAB_64_THRU_32__
62 --- a/arch/sparc/include/asm/byteorder.h
63 +++ b/arch/sparc/include/asm/byteorder.h
65 #include <asm/types.h>
69 +#define __KERN_BIG_ENDIAN
72 #define __SWAB_64_THRU_32__
73 --- a/include/linux/byteorder.h
74 +++ b/include/linux/byteorder.h
76 #include <linux/types.h>
77 #include <linux/swab.h>
79 -#if defined(__LITTLE_ENDIAN) && defined(__BIG_ENDIAN)
80 +#if defined(__KERN_LITTLE_ENDIAN) && defined(__KERN_BIG_ENDIAN)
81 # error Fix asm/byteorder.h to define one endianness
84 -#if !defined(__LITTLE_ENDIAN) && !defined(__BIG_ENDIAN)
85 +#if !defined(__KERN_LITTLE_ENDIAN) && !defined(__KERN_BIG_ENDIAN)
86 # error Fix asm/byteorder.h to define arch endianness
89 -#ifdef __LITTLE_ENDIAN
90 -# undef __LITTLE_ENDIAN
91 -# define __LITTLE_ENDIAN 1234
92 +#ifdef __KERN_LITTLE_ENDIAN
93 +# ifndef __LITTLE_ENDIAN
94 +# define __LITTLE_ENDIAN 1234
96 +# ifndef __LITTLE_ENDIAN_BITFIELD
97 +# define __LITTLE_ENDIAN_BITFIELD
101 +#ifdef __KERN_BIG_ENDIAN
102 +# ifndef __BIG_ENDIAN
103 +# define __BIG_ENDIAN 4321
105 +# ifndef __BIG_ENDIAN_BITFIELD
106 +# define __BIG_ENDIAN_BITFIELD
111 -# undef __BIG_ENDIAN
112 -# define __BIG_ENDIAN 4321
115 -#if defined(__LITTLE_ENDIAN) && !defined(__LITTLE_ENDIAN_BITFIELD)
116 -# define __LITTLE_ENDIAN_BITFIELD
119 -#if defined(__BIG_ENDIAN) && !defined(__BIG_ENDIAN_BITFIELD)
120 -# define __BIG_ENDIAN_BITFIELD
123 -#ifdef __LITTLE_ENDIAN
124 +#ifdef __KERN_LITTLE_ENDIAN
125 # define __le16_to_cpu(x) ((__force __u16)(__le16)(x))
126 # define __le32_to_cpu(x) ((__force __u32)(__le32)(x))
127 # define __le64_to_cpu(x) ((__force __u64)(__le64)(x))
129 # define __cpu_to_be64(x) ((__force __be64)__swab64(x))
133 +#ifdef __KERN_BIG_ENDIAN
134 # define __be16_to_cpu(x) ((__force __u16)(__be16)(x))
135 # define __be32_to_cpu(x) ((__force __u32)(__be32)(x))
136 # define __be64_to_cpu(x) ((__force __u64)(__be64)(x))
139 static inline void __le16_to_cpus(__u16 *p)
142 +#ifdef __KERN_BIG_ENDIAN
147 static inline void __cpu_to_le16s(__u16 *p)
150 +#ifdef __KERN_BIG_ENDIAN
155 static inline void __le32_to_cpus(__u32 *p)
158 +#ifdef __KERN_BIG_ENDIAN
163 static inline void __cpu_to_le32s(__u32 *p)
166 +#ifdef __KERN_BIG_ENDIAN
171 static inline void __le64_to_cpus(__u64 *p)
174 +#ifdef __KERN_BIG_ENDIAN
179 static inline void __cpu_to_le64s(__u64 *p)
182 +#ifdef __KERN_BIG_ENDIAN
187 static inline void __be16_to_cpus(__u16 *p)
189 -#ifdef __LITTLE_ENDIAN
190 +#ifdef __KERN_LITTLE_ENDIAN
195 static inline void __cpu_to_be16s(__u16 *p)
197 -#ifdef __LITTLE_ENDIAN
198 +#ifdef __KERN_LITTLE_ENDIAN
203 static inline void __be32_to_cpus(__u32 *p)
205 -#ifdef __LITTLE_ENDIAN
206 +#ifdef __KERN_LITTLE_ENDIAN
211 static inline void __cpu_to_be32s(__u32 *p)
213 -#ifdef __LITTLE_ENDIAN
214 +#ifdef __KERN_LITTLE_ENDIAN
219 static inline void __be64_to_cpus(__u64 *p)
221 -#ifdef __LITTLE_ENDIAN
222 +#ifdef __KERN_LITTLE_ENDIAN
227 static inline void __cpu_to_be64s(__u64 *p)
229 -#ifdef __LITTLE_ENDIAN
230 +#ifdef __KERN_LITTLE_ENDIAN
235 static inline __u16 __le16_to_cpup(const __le16 *p)
237 -#ifdef __LITTLE_ENDIAN
238 +#ifdef __KERN_LITTLE_ENDIAN
239 return (__force __u16)*p;
241 return __swab16p((__force __u16 *)p);
242 @@ -180,7 +180,7 @@ static inline __u16 __le16_to_cpup(const
244 static inline __u32 __le32_to_cpup(const __le32 *p)
246 -#ifdef __LITTLE_ENDIAN
247 +#ifdef __KERN_LITTLE_ENDIAN
248 return (__force __u32)*p;
250 return __swab32p((__force __u32 *)p);
251 @@ -189,7 +189,7 @@ static inline __u32 __le32_to_cpup(const
253 static inline __u64 __le64_to_cpup(const __le64 *p)
255 -#ifdef __LITTLE_ENDIAN
256 +#ifdef __KERN_LITTLE_ENDIAN
257 return (__force __u64)*p;
259 return __swab64p((__force __u64 *)p);
260 @@ -198,7 +198,7 @@ static inline __u64 __le64_to_cpup(const
262 static inline __le16 __cpu_to_le16p(const __u16 *p)
264 -#ifdef __LITTLE_ENDIAN
265 +#ifdef __KERN_LITTLE_ENDIAN
266 return (__force __le16)*p;
268 return (__force __le16)__swab16p(p);
269 @@ -207,7 +207,7 @@ static inline __le16 __cpu_to_le16p(cons
271 static inline __le32 __cpu_to_le32p(const __u32 *p)
273 -#ifdef __LITTLE_ENDIAN
274 +#ifdef __KERN_LITTLE_ENDIAN
275 return (__force __le32)*p;
277 return (__force __le32)__swab32p(p);
278 @@ -216,7 +216,7 @@ static inline __le32 __cpu_to_le32p(cons
280 static inline __le64 __cpu_to_le64p(const __u64 *p)
282 -#ifdef __LITTLE_ENDIAN
283 +#ifdef __KERN_LITTLE_ENDIAN
284 return (__force __le64)*p;
286 return (__force __le64)__swab64p(p);
287 @@ -225,7 +225,7 @@ static inline __le64 __cpu_to_le64p(cons
289 static inline __u16 __be16_to_cpup(const __be16 *p)
292 +#ifdef __KERN_BIG_ENDIAN
293 return (__force __u16)*p;
295 return __swab16p((__force __u16 *)p);
296 @@ -234,7 +234,7 @@ static inline __u16 __be16_to_cpup(const
298 static inline __u32 __be32_to_cpup(const __be32 *p)
301 +#ifdef __KERN_BIG_ENDIAN
302 return (__force __u32)*p;
304 return __swab32p((__force __u32 *)p);
305 @@ -243,7 +243,7 @@ static inline __u32 __be32_to_cpup(const
307 static inline __u64 __be64_to_cpup(const __be64 *p)
310 +#ifdef __KERN_BIG_ENDIAN
311 return (__force __u64)*p;
313 return __swab64p((__force __u64 *)p);
314 @@ -252,7 +252,7 @@ static inline __u64 __be64_to_cpup(const
316 static inline __be16 __cpu_to_be16p(const __u16 *p)
319 +#ifdef __KERN_BIG_ENDIAN
320 return (__force __be16)*p;
322 return (__force __be16)__swab16p(p);
323 @@ -261,7 +261,7 @@ static inline __be16 __cpu_to_be16p(cons
325 static inline __be32 __cpu_to_be32p(const __u32 *p)
328 +#ifdef __KERN_BIG_ENDIAN
329 return (__force __be32)*p;
331 return (__force __be32)__swab32p(p);
332 @@ -270,7 +270,7 @@ static inline __be32 __cpu_to_be32p(cons
334 static inline __be64 __cpu_to_be64p(const __u64 *p)
337 +#ifdef __KERN_BIG_ENDIAN
338 return (__force __be64)*p;
340 return (__force __be64)__swab64p(p);