#ifndef __ALPHA_UNALIGNED_H
#define __ALPHA_UNALIGNED_H

/*
 * The main single-value unaligned transfer routines.
 */
#define get_unaligned(ptr) \
        ((__typeof__(*(ptr)))__get_unaligned((ptr), sizeof(*(ptr))))
#define put_unaligned(x,ptr) \
        __put_unaligned((unsigned long)(x), (ptr), sizeof(*(ptr)))
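
/*
 * Usage sketch (illustrative only; "buf" is a hypothetical, possibly
 * misaligned byte pointer, not something defined by this header):
 *
 *      unsigned int v = get_unaligned((unsigned int *)(buf + 1));
 *      put_unaligned(v + 1, (unsigned int *)(buf + 1));
 *
 * Both macros dispatch on sizeof(*(ptr)) to the helpers below, so the
 * access works even when the pointer is not naturally aligned.
 */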

/*
 * This is a silly but good way to make sure that
 * the get/put functions are indeed always optimized,
 * and that we use the correct sizes.
 */
extern void bad_unaligned_access_length(void);
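
/*
 * How the check works (a sketch, assuming the size argument is a
 * compile-time constant, as it is for the macros above): the switch in
 * the helpers below collapses to a single case, and any unsupported
 * size leaves behind a call to bad_unaligned_access_length(), which is
 * defined nowhere, so the build fails at link time instead of silently
 * doing the wrong access, e.g.
 *
 *      __get_unaligned(buf, 3);   (fails to link: undefined reference
 *                                  to bad_unaligned_access_length)
 *
 * where "buf" is any hypothetical pointer.
 */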

/*
 * Elemental unaligned loads
 */

extern inline unsigned long __uldq(const unsigned long * r11)
{
        unsigned long r1,r2;
        /* Two aligned quadword loads cover the unaligned datum; the
           extract instructions pull out the low and high pieces, which
           are then merged with an OR. */
        __asm__("ldq_u %0,%3\n\t"
                "ldq_u %1,%4\n\t"
                "extql %0,%2,%0\n\t"
                "extqh %1,%2,%1"
                :"=&r" (r1), "=&r" (r2)
                :"r" (r11),
                 "m" (*r11),
                 "m" (*(const unsigned long *)(7+(char *) r11)));
        return r1 | r2;
}

extern inline unsigned long __uldl(const unsigned int * r11)
{
        unsigned long r1,r2;
        __asm__("ldq_u %0,%3\n\t"
                "ldq_u %1,%4\n\t"
                "extll %0,%2,%0\n\t"
                "extlh %1,%2,%1"
                :"=&r" (r1), "=&r" (r2)
                :"r" (r11),
                 "m" (*r11),
                 "m" (*(const unsigned long *)(3+(char *) r11)));
        return r1 | r2;
}

extern inline unsigned long __uldw(const unsigned short * r11)
{
        unsigned long r1,r2;
        __asm__("ldq_u %0,%3\n\t"
                "ldq_u %1,%4\n\t"
                "extwl %0,%2,%0\n\t"
                "extwh %1,%2,%1"
                :"=&r" (r1), "=&r" (r2)
                :"r" (r11),
                 "m" (*r11),
                 "m" (*(const unsigned long *)(1+(char *) r11)));
        return r1 | r2;
}
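
/*
 * Illustrative sketch of the load technique (the address is made up):
 * a 32-bit load from 0x1006 spans the aligned quadwords at 0x1000 and
 * 0x1008. __uldl fetches both with ldq_u (which ignores the low three
 * address bits), extracts the wanted bytes from each with extll/extlh
 * keyed on the low bits of the address, and ORs the two halves, so no
 * unaligned-access trap is ever taken:
 *
 *      unsigned long v = __uldl((const unsigned int *) 0x1006);
 */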

/*
 * Elemental unaligned stores
 */

extern inline void __ustq(unsigned long r5, unsigned long * r11)
{
        unsigned long r1,r2,r3,r4;
        /* Read both aligned quadwords, insert the new bytes into the
           right positions, mask out the old bytes, then merge and
           write the quadwords back. */
        __asm__("ldq_u %3,%1\n\t"
                "ldq_u %2,%0\n\t"
                "insqh %6,%7,%5\n\t"
                "insql %6,%7,%4\n\t"
                "mskqh %3,%7,%3\n\t"
                "mskql %2,%7,%2\n\t"
                "bis %3,%5,%3\n\t"
                "bis %2,%4,%2\n\t"
                "stq_u %3,%1\n\t"
                "stq_u %2,%0"
                :"=m" (*r11),
                 "=m" (*(unsigned long *)(7+(char *) r11)),
                 "=&r" (r1), "=&r" (r2), "=&r" (r3), "=&r" (r4)
                :"r" (r5), "r" (r11));
}

extern inline void __ustl(unsigned long r5, unsigned int * r11)
{
        unsigned long r1,r2,r3,r4;
        __asm__("ldq_u %3,%1\n\t"
                "ldq_u %2,%0\n\t"
                "inslh %6,%7,%5\n\t"
                "insll %6,%7,%4\n\t"
                "msklh %3,%7,%3\n\t"
                "mskll %2,%7,%2\n\t"
                "bis %3,%5,%3\n\t"
                "bis %2,%4,%2\n\t"
                "stq_u %3,%1\n\t"
                "stq_u %2,%0"
                :"=m" (*r11),
                 "=m" (*(unsigned long *)(3+(char *) r11)),
                 "=&r" (r1), "=&r" (r2), "=&r" (r3), "=&r" (r4)
                :"r" (r5), "r" (r11));
}

extern inline void __ustw(unsigned long r5, unsigned short * r11)
{
        unsigned long r1,r2,r3,r4;
        __asm__("ldq_u %3,%1\n\t"
                "ldq_u %2,%0\n\t"
                "inswh %6,%7,%5\n\t"
                "inswl %6,%7,%4\n\t"
                "mskwh %3,%7,%3\n\t"
                "mskwl %2,%7,%2\n\t"
                "bis %3,%5,%3\n\t"
                "bis %2,%4,%2\n\t"
                "stq_u %3,%1\n\t"
                "stq_u %2,%0"
                :"=m" (*r11),
                 "=m" (*(unsigned long *)(1+(char *) r11)),
                 "=&r" (r1), "=&r" (r2), "=&r" (r3), "=&r" (r4)
                :"r" (r5), "r" (r11));
}
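
/*
 * Illustrative sketch of the store technique (address and value are
 * made up): a 32-bit store to 0x1006 touches the aligned quadwords at
 * 0x1000 and 0x1008. __ustl reads both back with ldq_u, shifts the new
 * value into position with insll/inslh, clears the target bytes of the
 * old quadwords with mskll/msklh, merges each pair with bis, and writes
 * both quadwords back with stq_u, leaving the surrounding bytes intact:
 *
 *      __ustl(0x12345678, (unsigned int *) 0x1006);
 */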

extern inline unsigned long __get_unaligned(const void *ptr, size_t size)
{
        unsigned long val;
        switch (size) {
        case 1:
                val = *(const unsigned char *)ptr;
                break;
        case 2:
                val = __uldw((const unsigned short *)ptr);
                break;
        case 4:
                val = __uldl((const unsigned int *)ptr);
                break;
        case 8:
                val = __uldq((const unsigned long *)ptr);
                break;
        default:
                bad_unaligned_access_length();
        }
        return val;
}

extern inline void __put_unaligned(unsigned long val, void *ptr, size_t size)
{
        switch (size) {
        case 1:
                *(unsigned char *)ptr = (val);
                break;
        case 2:
                __ustw(val, (unsigned short *)ptr);
                break;
        case 4:
                __ustl(val, (unsigned int *)ptr);
                break;
        case 8:
                __ustq(val, (unsigned long *)ptr);
                break;
        default:
                bad_unaligned_access_length();
        }
}

#endif