#ifndef BOTAN_LOAD_STORE_H__
#define BOTAN_LOAD_STORE_H__

#include <botan/types.h>
#include <botan/bswap.h>
#include <botan/get_byte.h>
#include <cstring>

#if BOTAN_TARGET_UNALIGNED_MEMORY_ACCESS_OK

#if defined(BOTAN_TARGET_CPU_IS_BIG_ENDIAN)

#define BOTAN_ENDIAN_N2B(x) (x)
#define BOTAN_ENDIAN_B2N(x) (x)

#define BOTAN_ENDIAN_N2L(x) reverse_bytes(x)
#define BOTAN_ENDIAN_L2N(x) reverse_bytes(x)

#elif defined(BOTAN_TARGET_CPU_IS_LITTLE_ENDIAN)

#define BOTAN_ENDIAN_N2L(x) (x)
#define BOTAN_ENDIAN_L2N(x) (x)

#define BOTAN_ENDIAN_N2B(x) reverse_bytes(x)
#define BOTAN_ENDIAN_B2N(x) reverse_bytes(x)

#endif

#endif

namespace Botan {

/** Make a u16bit from two bytes; i0 becomes the high byte */
inline u16bit make_u16bit(byte i0, byte i1)
   {
   return ((static_cast<u16bit>(i0) << 8) | i1);
   }
/** Make a u32bit from four bytes; i0 becomes the most significant byte */
inline u32bit make_u32bit(byte i0, byte i1, byte i2, byte i3)
   {
   return ((static_cast<u32bit>(i0) << 24) |
           (static_cast<u32bit>(i1) << 16) |
           (static_cast<u32bit>(i2) <<  8) |
           (static_cast<u32bit>(i3)));
   }
/** Make a u64bit from eight bytes; i0 becomes the most significant byte */
inline u64bit make_u64bit(byte i0, byte i1, byte i2, byte i3,
                          byte i4, byte i5, byte i6, byte i7)
   {
   return ((static_cast<u64bit>(i0) << 56) |
           (static_cast<u64bit>(i1) << 48) |
           (static_cast<u64bit>(i2) << 40) |
           (static_cast<u64bit>(i3) << 32) |
           (static_cast<u64bit>(i4) << 24) |
           (static_cast<u64bit>(i5) << 16) |
           (static_cast<u64bit>(i6) <<  8) |
           (static_cast<u64bit>(i7)));
   }
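// Usage sketch: because the first argument always lands in the most
// significant byte, the result is independent of host endianness:
//
//    make_u32bit(0xDE, 0xAD, 0xBE, 0xEF) == 0xDEADBEEF   // on any CPU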
/** Load the off'th big-endian word of type T from in */
template<typename T>
inline T load_be(const byte in[], size_t off)
   {
   in += off * sizeof(T);
   T out = 0;
   for(size_t i = 0; i != sizeof(T); ++i)
      out = (out << 8) | in[i];
   return out;
   }
/** Load the off'th little-endian word of type T from in */
template<typename T>
inline T load_le(const byte in[], size_t off)
   {
   in += off * sizeof(T);
   T out = 0;
   for(size_t i = 0; i != sizeof(T); ++i)
      out = (out << 8) | in[sizeof(T)-1-i];
   return out;
   }
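// A quick sketch of the difference between the two generic loads: given
// the bytes { 0x12, 0x34 },
//
//    load_be<u16bit>(in, 0) == 0x1234   // in[0] is the high byte
//    load_le<u16bit>(in, 0) == 0x3412   // in[0] is the low byte
//
// Note that off indexes in units of sizeof(T), not bytes, so
// load_be<u32bit>(in, 2) reads bytes 8..11 of in.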
/** Load a big-endian u16bit */
template<>
inline u16bit load_be<u16bit>(const byte in[], size_t off)
   {
#if BOTAN_TARGET_UNALIGNED_MEMORY_ACCESS_OK
   return BOTAN_ENDIAN_N2B(*(reinterpret_cast<const u16bit*>(in) + off));
#else
   in += off * sizeof(u16bit);
   return make_u16bit(in[0], in[1]);
#endif
   }
/** Load a little-endian u16bit */
template<>
inline u16bit load_le<u16bit>(const byte in[], size_t off)
   {
#if BOTAN_TARGET_UNALIGNED_MEMORY_ACCESS_OK
   return BOTAN_ENDIAN_N2L(*(reinterpret_cast<const u16bit*>(in) + off));
#else
   in += off * sizeof(u16bit);
   return make_u16bit(in[1], in[0]);
#endif
   }
/** Load a big-endian u32bit */
template<>
inline u32bit load_be<u32bit>(const byte in[], size_t off)
   {
#if BOTAN_TARGET_UNALIGNED_MEMORY_ACCESS_OK
   return BOTAN_ENDIAN_N2B(*(reinterpret_cast<const u32bit*>(in) + off));
#else
   in += off * sizeof(u32bit);
   return make_u32bit(in[0], in[1], in[2], in[3]);
#endif
   }
/** Load a little-endian u32bit */
template<>
inline u32bit load_le<u32bit>(const byte in[], size_t off)
   {
#if BOTAN_TARGET_UNALIGNED_MEMORY_ACCESS_OK
   return BOTAN_ENDIAN_N2L(*(reinterpret_cast<const u32bit*>(in) + off));
#else
   in += off * sizeof(u32bit);
   return make_u32bit(in[3], in[2], in[1], in[0]);
#endif
   }
/** Load a big-endian u64bit */
template<>
inline u64bit load_be<u64bit>(const byte in[], size_t off)
   {
#if BOTAN_TARGET_UNALIGNED_MEMORY_ACCESS_OK
   return BOTAN_ENDIAN_N2B(*(reinterpret_cast<const u64bit*>(in) + off));
#else
   in += off * sizeof(u64bit);
   return make_u64bit(in[0], in[1], in[2], in[3],
                      in[4], in[5], in[6], in[7]);
#endif
   }
/** Load a little-endian u64bit */
template<>
inline u64bit load_le<u64bit>(const byte in[], size_t off)
   {
#if BOTAN_TARGET_UNALIGNED_MEMORY_ACCESS_OK
   return BOTAN_ENDIAN_N2L(*(reinterpret_cast<const u64bit*>(in) + off));
#else
   in += off * sizeof(u64bit);
   return make_u64bit(in[7], in[6], in[5], in[4],
                      in[3], in[2], in[1], in[0]);
#endif
   }
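// The specializations above exist purely for speed: when the build flags
// say unaligned reads are safe, a single (possibly byte-swapped) word load
// replaces the byte-at-a-time loop. Both paths return identical values; a
// minimal sanity check, assuming an 8-byte test buffer:
//
//    const byte buf[8] = { 1, 2, 3, 4, 5, 6, 7, 8 };
//    const u64bit x = load_be<u64bit>(buf, 0);
//    // x == 0x0102030405060708 on every platform, via either code path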
/** Load two little-endian words */
template<typename T>
inline void load_le(const byte in[], T& x0, T& x1)
   {
   x0 = load_le<T>(in, 0);
   x1 = load_le<T>(in, 1);
   }
/** Load four little-endian words */
template<typename T>
inline void load_le(const byte in[],
                    T& x0, T& x1, T& x2, T& x3)
   {
   x0 = load_le<T>(in, 0);
   x1 = load_le<T>(in, 1);
   x2 = load_le<T>(in, 2);
   x3 = load_le<T>(in, 3);
   }
/** Load eight little-endian words */
template<typename T>
inline void load_le(const byte in[],
                    T& x0, T& x1, T& x2, T& x3,
                    T& x4, T& x5, T& x6, T& x7)
   {
   x0 = load_le<T>(in, 0);
   x1 = load_le<T>(in, 1);
   x2 = load_le<T>(in, 2);
   x3 = load_le<T>(in, 3);
   x4 = load_le<T>(in, 4);
   x5 = load_le<T>(in, 5);
   x6 = load_le<T>(in, 6);
   x7 = load_le<T>(in, 7);
   }
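// Typical call site (a sketch): ciphers that define their state in
// little-endian words, such as Salsa20, unpack input blocks this way:
//
//    u32bit x0, x1;
//    load_le(in, x0, x1);   // x0 = bytes 0..3, x1 = bytes 4..7 (LE)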
/** Load count little-endian words of type T from in into out */
template<typename T>
inline void load_le(T out[], const byte in[], size_t count)
   {
#if defined(BOTAN_TARGET_CPU_HAS_KNOWN_ENDIANNESS)
   std::memcpy(out, in, sizeof(T)*count);

#if defined(BOTAN_TARGET_CPU_IS_BIG_ENDIAN)
   const size_t blocks = count - (count % 4);
   const size_t left = count - blocks;

   for(size_t i = 0; i != blocks; i += 4)
      bswap_4(out + i);

   for(size_t i = 0; i != left; ++i)
      out[blocks+i] = reverse_bytes(out[blocks+i]);
#endif

#else
   for(size_t i = 0; i != count; ++i)
      out[i] = load_le<T>(in, i);
#endif
   }
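// Bulk-load sketch: on a little-endian target the whole call collapses to
// one memcpy; on a big-endian target it memcpys and then byte-swaps in
// place, four words per iteration. Either way the result matches the
// portable fallback. Assuming key_bytes is any 16-byte input:
//
//    u32bit W[4];
//    load_le(W, key_bytes, 4);   // expand 16 bytes into 4 LE words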
/** Load two big-endian words */
template<typename T>
inline void load_be(const byte in[], T& x0, T& x1)
   {
   x0 = load_be<T>(in, 0);
   x1 = load_be<T>(in, 1);
   }
/** Load four big-endian words */
template<typename T>
inline void load_be(const byte in[],
                    T& x0, T& x1, T& x2, T& x3)
   {
   x0 = load_be<T>(in, 0);
   x1 = load_be<T>(in, 1);
   x2 = load_be<T>(in, 2);
   x3 = load_be<T>(in, 3);
   }
/** Load eight big-endian words */
template<typename T>
inline void load_be(const byte in[],
                    T& x0, T& x1, T& x2, T& x3,
                    T& x4, T& x5, T& x6, T& x7)
   {
   x0 = load_be<T>(in, 0);
   x1 = load_be<T>(in, 1);
   x2 = load_be<T>(in, 2);
   x3 = load_be<T>(in, 3);
   x4 = load_be<T>(in, 4);
   x5 = load_be<T>(in, 5);
   x6 = load_be<T>(in, 6);
   x7 = load_be<T>(in, 7);
   }
/** Load count big-endian words of type T from in into out */
template<typename T>
inline void load_be(T out[], const byte in[], size_t count)
   {
#if defined(BOTAN_TARGET_CPU_HAS_KNOWN_ENDIANNESS)
   std::memcpy(out, in, sizeof(T)*count);

#if defined(BOTAN_TARGET_CPU_IS_LITTLE_ENDIAN)
   const size_t blocks = count - (count % 4);
   const size_t left = count - blocks;

   for(size_t i = 0; i != blocks; i += 4)
      bswap_4(out + i);

   for(size_t i = 0; i != left; ++i)
      out[blocks+i] = reverse_bytes(out[blocks+i]);
#endif

#else
   for(size_t i = 0; i != count; ++i)
      out[i] = load_be<T>(in, i);
#endif
   }
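// A common consumer of the big-endian bulk load (sketch): hash functions
// such as SHA-256 define their message schedule as big-endian 32-bit words,
// so filling the first 16 schedule words is a single call. Assuming
// msg_block points at a 64-byte message block:
//
//    u32bit M[16];
//    load_be(M, msg_block, 16);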
/** Store a big-endian u16bit */
inline void store_be(u16bit in, byte out[2])
   {
#if BOTAN_TARGET_UNALIGNED_MEMORY_ACCESS_OK
   *reinterpret_cast<u16bit*>(out) = BOTAN_ENDIAN_B2N(in);
#else
   out[0] = get_byte(0, in);
   out[1] = get_byte(1, in);
#endif
   }
/** Store a little-endian u16bit */
inline void store_le(u16bit in, byte out[2])
   {
#if BOTAN_TARGET_UNALIGNED_MEMORY_ACCESS_OK
   *reinterpret_cast<u16bit*>(out) = BOTAN_ENDIAN_L2N(in);
#else
   out[0] = get_byte(1, in);
   out[1] = get_byte(0, in);
#endif
   }
/** Store a big-endian u32bit */
inline void store_be(u32bit in, byte out[4])
   {
#if BOTAN_TARGET_UNALIGNED_MEMORY_ACCESS_OK
   *reinterpret_cast<u32bit*>(out) = BOTAN_ENDIAN_B2N(in);
#else
   out[0] = get_byte(0, in);
   out[1] = get_byte(1, in);
   out[2] = get_byte(2, in);
   out[3] = get_byte(3, in);
#endif
   }
/** Store a little-endian u32bit */
inline void store_le(u32bit in, byte out[4])
   {
#if BOTAN_TARGET_UNALIGNED_MEMORY_ACCESS_OK
   *reinterpret_cast<u32bit*>(out) = BOTAN_ENDIAN_L2N(in);
#else
   out[0] = get_byte(3, in);
   out[1] = get_byte(2, in);
   out[2] = get_byte(1, in);
   out[3] = get_byte(0, in);
#endif
   }
/** Store a big-endian u64bit */
inline void store_be(u64bit in, byte out[8])
   {
#if BOTAN_TARGET_UNALIGNED_MEMORY_ACCESS_OK
   *reinterpret_cast<u64bit*>(out) = BOTAN_ENDIAN_B2N(in);
#else
   out[0] = get_byte(0, in);
   out[1] = get_byte(1, in);
   out[2] = get_byte(2, in);
   out[3] = get_byte(3, in);
   out[4] = get_byte(4, in);
   out[5] = get_byte(5, in);
   out[6] = get_byte(6, in);
   out[7] = get_byte(7, in);
#endif
   }
/** Store a little-endian u64bit */
inline void store_le(u64bit in, byte out[8])
   {
#if BOTAN_TARGET_UNALIGNED_MEMORY_ACCESS_OK
   *reinterpret_cast<u64bit*>(out) = BOTAN_ENDIAN_L2N(in);
#else
   out[0] = get_byte(7, in);
   out[1] = get_byte(6, in);
   out[2] = get_byte(5, in);
   out[3] = get_byte(4, in);
   out[4] = get_byte(3, in);
   out[5] = get_byte(2, in);
   out[6] = get_byte(1, in);
   out[7] = get_byte(0, in);
#endif
   }
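// Store sketch: the scalar stores are exact inverses of the loads, so
// encoding a 64-bit counter for a big-endian wire format is just:
//
//    byte ctr[8];
//    store_be(block_counter, ctr);   // block_counter: some u64bit
//    // load_be<u64bit>(ctr, 0) == block_counter, by construction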
/** Store two little-endian words */
template<typename T>
inline void store_le(byte out[], T x0, T x1)
   {
   store_le(x0, out + (0 * sizeof(T)));
   store_le(x1, out + (1 * sizeof(T)));
   }
/** Store two big-endian words */
template<typename T>
inline void store_be(byte out[], T x0, T x1)
   {
   store_be(x0, out + (0 * sizeof(T)));
   store_be(x1, out + (1 * sizeof(T)));
   }
/** Store four little-endian words */
template<typename T>
inline void store_le(byte out[], T x0, T x1, T x2, T x3)
   {
   store_le(x0, out + (0 * sizeof(T)));
   store_le(x1, out + (1 * sizeof(T)));
   store_le(x2, out + (2 * sizeof(T)));
   store_le(x3, out + (3 * sizeof(T)));
   }
/** Store four big-endian words */
template<typename T>
inline void store_be(byte out[], T x0, T x1, T x2, T x3)
   {
   store_be(x0, out + (0 * sizeof(T)));
   store_be(x1, out + (1 * sizeof(T)));
   store_be(x2, out + (2 * sizeof(T)));
   store_be(x3, out + (3 * sizeof(T)));
   }
/** Store eight little-endian words */
template<typename T>
inline void store_le(byte out[], T x0, T x1, T x2, T x3,
                                 T x4, T x5, T x6, T x7)
   {
   store_le(x0, out + (0 * sizeof(T)));
   store_le(x1, out + (1 * sizeof(T)));
   store_le(x2, out + (2 * sizeof(T)));
   store_le(x3, out + (3 * sizeof(T)));
   store_le(x4, out + (4 * sizeof(T)));
   store_le(x5, out + (5 * sizeof(T)));
   store_le(x6, out + (6 * sizeof(T)));
   store_le(x7, out + (7 * sizeof(T)));
   }
/** Store eight big-endian words */
template<typename T>
inline void store_be(byte out[], T x0, T x1, T x2, T x3,
                                 T x4, T x5, T x6, T x7)
   {
   store_be(x0, out + (0 * sizeof(T)));
   store_be(x1, out + (1 * sizeof(T)));
   store_be(x2, out + (2 * sizeof(T)));
   store_be(x3, out + (3 * sizeof(T)));
   store_be(x4, out + (4 * sizeof(T)));
   store_be(x5, out + (5 * sizeof(T)));
   store_be(x6, out + (6 * sizeof(T)));
   store_be(x7, out + (7 * sizeof(T)));
   }
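// End-to-end sketch tying the two halves together: serialize a 4-word
// state and read it back, which must reproduce the original words on any
// platform. Assuming s0..s3 hold some u32bit cipher state:
//
//    byte buf[16];
//    store_le(buf, s0, s1, s2, s3);
//    u32bit t0, t1, t2, t3;
//    load_le(buf, t0, t1, t2, t3);   // t0..t3 == s0..s3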
}

#endif