/*
 * unaligned.h - helpers for reading/writing unaligned little- and
 * big-endian integers via memcpy (safe on all architectures).
 */
#ifndef ASM_UNALIGNED_H
#define ASM_UNALIGNED_H
#include <assert.h>
#include <linux/types.h>
/* Derive __LITTLE_ENDIAN from compiler-provided endianness macros when
 * the platform headers did not already define it. */
#ifndef __LITTLE_ENDIAN
# if (__BYTE_ORDER__ == __ORDER_LITTLE_ENDIAN__) || defined(__LITTLE_ENDIAN__)
# define __LITTLE_ENDIAN 1
# endif
#endif
/* _IS_LITTLE_ENDIAN: always-defined 0/1 constant, usable both in #if
 * conditions and in ordinary C expressions. */
#ifdef __LITTLE_ENDIAN
# define _IS_LITTLE_ENDIAN 1
#else
# define _IS_LITTLE_ENDIAN 0
#endif
  15. static unsigned _isLittleEndian(void)
  16. {
  17. const union { uint32_t u; uint8_t c[4]; } one = { 1 };
  18. assert(_IS_LITTLE_ENDIAN == one.c[0]);
  19. (void)one;
  20. return _IS_LITTLE_ENDIAN;
  21. }
  22. static uint16_t _swap16(uint16_t in)
  23. {
  24. return ((in & 0xF) << 8) + ((in & 0xF0) >> 8);
  25. }
  26. static uint32_t _swap32(uint32_t in)
  27. {
  28. return __builtin_bswap32(in);
  29. }
  30. static uint64_t _swap64(uint64_t in)
  31. {
  32. return __builtin_bswap64(in);
  33. }
  34. /* Little endian */
  35. static uint16_t get_unaligned_le16(const void* memPtr)
  36. {
  37. uint16_t val;
  38. __builtin_memcpy(&val, memPtr, sizeof(val));
  39. if (!_isLittleEndian()) _swap16(val);
  40. return val;
  41. }
  42. static uint32_t get_unaligned_le32(const void* memPtr)
  43. {
  44. uint32_t val;
  45. __builtin_memcpy(&val, memPtr, sizeof(val));
  46. if (!_isLittleEndian()) _swap32(val);
  47. return val;
  48. }
  49. static uint64_t get_unaligned_le64(const void* memPtr)
  50. {
  51. uint64_t val;
  52. __builtin_memcpy(&val, memPtr, sizeof(val));
  53. if (!_isLittleEndian()) _swap64(val);
  54. return val;
  55. }
  56. static void put_unaligned_le16(uint16_t value, void* memPtr)
  57. {
  58. if (!_isLittleEndian()) value = _swap16(value);
  59. __builtin_memcpy(memPtr, &value, sizeof(value));
  60. }
  61. static void put_unaligned_le32(uint32_t value, void* memPtr)
  62. {
  63. if (!_isLittleEndian()) value = _swap32(value);
  64. __builtin_memcpy(memPtr, &value, sizeof(value));
  65. }
  66. static void put_unaligned_le64(uint64_t value, void* memPtr)
  67. {
  68. if (!_isLittleEndian()) value = _swap64(value);
  69. __builtin_memcpy(memPtr, &value, sizeof(value));
  70. }
  71. /* big endian */
  72. static uint32_t get_unaligned_be32(const void* memPtr)
  73. {
  74. uint32_t val;
  75. __builtin_memcpy(&val, memPtr, sizeof(val));
  76. if (_isLittleEndian()) _swap32(val);
  77. return val;
  78. }
  79. static uint64_t get_unaligned_be64(const void* memPtr)
  80. {
  81. uint64_t val;
  82. __builtin_memcpy(&val, memPtr, sizeof(val));
  83. if (_isLittleEndian()) _swap64(val);
  84. return val;
  85. }
  86. static void put_unaligned_be32(uint32_t value, void* memPtr)
  87. {
  88. if (_isLittleEndian()) value = _swap32(value);
  89. __builtin_memcpy(memPtr, &value, sizeof(value));
  90. }
  91. static void put_unaligned_be64(uint64_t value, void* memPtr)
  92. {
  93. if (_isLittleEndian()) value = _swap64(value);
  94. __builtin_memcpy(memPtr, &value, sizeof(value));
  95. }
  96. /* generic */
/* Deliberately never defined: referencing it for an unsupported size
 * turns the mistake into a link-time error. */
extern void __bad_unaligned_access_size(void);
/* Type-generic little-endian load: dispatches on sizeof(*(ptr)) to the
 * width-specific helper; any size other than 1/2/4/8 fails to link. */
#define __get_unaligned_le(ptr) ((typeof(*(ptr)))({ \
__builtin_choose_expr(sizeof(*(ptr)) == 1, *(ptr), \
__builtin_choose_expr(sizeof(*(ptr)) == 2, get_unaligned_le16((ptr)), \
__builtin_choose_expr(sizeof(*(ptr)) == 4, get_unaligned_le32((ptr)), \
__builtin_choose_expr(sizeof(*(ptr)) == 8, get_unaligned_le64((ptr)), \
__bad_unaligned_access_size())))); \
}))
/* Type-generic big-endian load, mirroring __get_unaligned_le.
 * NOTE(review): get_unaligned_be16 is not defined in this header —
 * confirm it is provided elsewhere, otherwise 2-byte big-endian loads
 * will not compile/link. */
#define __get_unaligned_be(ptr) ((typeof(*(ptr)))({ \
__builtin_choose_expr(sizeof(*(ptr)) == 1, *(ptr), \
__builtin_choose_expr(sizeof(*(ptr)) == 2, get_unaligned_be16((ptr)), \
__builtin_choose_expr(sizeof(*(ptr)) == 4, get_unaligned_be32((ptr)), \
__builtin_choose_expr(sizeof(*(ptr)) == 8, get_unaligned_be64((ptr)), \
__bad_unaligned_access_size())))); \
}))
/* Type-generic little-endian store: dispatches on sizeof(*(ptr)) to the
 * width-specific helper; unsupported sizes fail at link time.  The
 * statement expression evaluates to (void)0 so the macro has no value. */
#define __put_unaligned_le(val, ptr) \
({ \
void *__gu_p = (ptr); \
switch (sizeof(*(ptr))) { \
case 1: \
*(uint8_t *)__gu_p = (uint8_t)(val); \
break; \
case 2: \
put_unaligned_le16((uint16_t)(val), __gu_p); \
break; \
case 4: \
put_unaligned_le32((uint32_t)(val), __gu_p); \
break; \
case 8: \
put_unaligned_le64((uint64_t)(val), __gu_p); \
break; \
default: \
__bad_unaligned_access_size(); \
break; \
} \
(void)0; \
})
/* Type-generic big-endian store, mirroring __put_unaligned_le.
 * NOTE(review): put_unaligned_be16 is not defined in this header —
 * confirm it is provided elsewhere, otherwise 2-byte big-endian stores
 * will not link. */
#define __put_unaligned_be(val, ptr) \
({ \
void *__gu_p = (ptr); \
switch (sizeof(*(ptr))) { \
case 1: \
*(uint8_t *)__gu_p = (uint8_t)(val); \
break; \
case 2: \
put_unaligned_be16((uint16_t)(val), __gu_p); \
break; \
case 4: \
put_unaligned_be32((uint32_t)(val), __gu_p); \
break; \
case 8: \
put_unaligned_be64((uint64_t)(val), __gu_p); \
break; \
default: \
__bad_unaligned_access_size(); \
break; \
} \
(void)0; \
})
/* get_unaligned/put_unaligned operate in the host's native byte order:
 * they alias the LE variants on little-endian builds, BE otherwise. */
#if _IS_LITTLE_ENDIAN
# define get_unaligned __get_unaligned_le
# define put_unaligned __put_unaligned_le
#else
# define get_unaligned __get_unaligned_be
# define put_unaligned __put_unaligned_be
#endif
#endif // ASM_UNALIGNED_H