#ifdef __GNUC__
# define GNUC_PREREQ(maj, min) \
        ((__GNUC__ << 16) + __GNUC_MINOR__ >= ((maj) << 16) + (min))
#else
# define GNUC_PREREQ(maj, min) 0
#endif
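
/* Feature bits for the amask instruction. amask returns its operand with the
 * bits of all implemented extensions cleared, so a zero result means every
 * requested feature is available. */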
#define AMASK_BWX (1 << 0)
#define AMASK_FIX (1 << 1)
#define AMASK_CIX (1 << 2)
#define AMASK_MVI (1 << 8)
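
/* Runtime feature tests: each HAVE_*() macro checks the corresponding amask
 * bit, so the extension is detected on the CPU that actually runs the code. */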
# define HAVE_BWX() (amask(AMASK_BWX) == 0)
# define HAVE_FIX() (amask(AMASK_FIX) == 0)
# define HAVE_MVI() (amask(AMASK_MVI) == 0)
# define HAVE_CIX() (amask(AMASK_CIX) == 0)
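
/* Byte/word "splat" helpers used by the Alpha DSP code: BYTE_VEC() replicates
 * the low byte of x into all eight byte lanes, WORD_VEC() replicates the low
 * 16-bit word into all four word lanes. This is a minimal sketch of the usual
 * definitions, assuming no other BYTE_VEC()/WORD_VEC() is in scope. */
static inline uint64_t BYTE_VEC(uint64_t x)
{
    x |= x <<  8;
    x |= x << 16;
    x |= x << 32;
    return x;
}

static inline uint64_t WORD_VEC(uint64_t x)
{
    x |= x << 16;
    x |= x << 32;
    return x;
}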

/* Aligned 64/32-bit loads and stores, and 16-bit sign extension. */
#define ldq(p) (*(const uint64_t *) (p))
#define ldl(p) (*(const int32_t *) (p))
#define stl(l, p) do { *(uint32_t *) (p) = (l); } while (0)
#define stq(l, p) do { *(uint64_t *) (p) = (l); } while (0)
#define sextw(x) ((int16_t) (x))
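
/* Everything from here on is compiler specific: GCC uses builtins or inline
 * asm, Compaq/DEC C uses its asm() intrinsic. */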

#ifdef __GNUC__
struct unaligned_long { uint64_t l; } __attribute__((packed));
#define ldq_u(p) (*(const uint64_t *) (((uint64_t) (p)) & ~7ul))
#define uldq(a) (((const struct unaligned_long *) (a))->l)

#if GNUC_PREREQ(3,3)
/* GCC 3.3+: use __builtin_prefetch and the Alpha instruction builtins. */
#define prefetch(p) __builtin_prefetch((p), 0, 1)
#define prefetch_en(p) __builtin_prefetch((p), 0, 0)
#define prefetch_m(p) __builtin_prefetch((p), 1, 1)
#define prefetch_men(p) __builtin_prefetch((p), 1, 0)
#define cmpbge __builtin_alpha_cmpbge
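/* The (uint64_t) cast keeps GCC quiet when a pointer is passed as the shift
 * operand, which is the common use for the extract instructions. */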
#define extql(a, b) __builtin_alpha_extql(a, (uint64_t) (b))
#define extwl(a, b) __builtin_alpha_extwl(a, (uint64_t) (b))
#define extqh(a, b) __builtin_alpha_extqh(a, (uint64_t) (b))
#define zap __builtin_alpha_zap
#define zapnot __builtin_alpha_zapnot
#define amask __builtin_alpha_amask
#define implver __builtin_alpha_implver
#define rpcc __builtin_alpha_rpcc
#else
/* Older GCC: emit the instructions directly. The prefetches are loads into
 * the zero registers ($31 / $f31). */
#define prefetch(p) asm volatile("ldl $31,%0" : : "m"(*(const char *) (p)) : "memory")
#define prefetch_en(p) asm volatile("ldq $31,%0" : : "m"(*(const char *) (p)) : "memory")
#define prefetch_m(p) asm volatile("lds $f31,%0" : : "m"(*(const char *) (p)) : "memory")
#define prefetch_men(p) asm volatile("ldt $f31,%0" : : "m"(*(const char *) (p)) : "memory")
#define cmpbge(a, b) ({ uint64_t __r; asm ("cmpbge %r1,%2,%0" : "=r" (__r) : "rJ" (a), "rI" (b)); __r; })
#define extql(a, b) ({ uint64_t __r; asm ("extql %r1,%2,%0" : "=r" (__r) : "rJ" (a), "rI" (b)); __r; })
#define extwl(a, b) ({ uint64_t __r; asm ("extwl %r1,%2,%0" : "=r" (__r) : "rJ" (a), "rI" (b)); __r; })
#define extqh(a, b) ({ uint64_t __r; asm ("extqh %r1,%2,%0" : "=r" (__r) : "rJ" (a), "rI" (b)); __r; })
#define zap(a, b) ({ uint64_t __r; asm ("zap %r1,%2,%0" : "=r" (__r) : "rJ" (a), "rI" (b)); __r; })
#define zapnot(a, b) ({ uint64_t __r; asm ("zapnot %r1,%2,%0" : "=r" (__r) : "rJ" (a), "rI" (b)); __r; })
#define amask(a) ({ uint64_t __r; asm ("amask %1,%0" : "=r" (__r) : "rI" (a)); __r; })
#define implver() ({ uint64_t __r; asm ("implver %0" : "=r" (__r)); __r; })
#define rpcc() ({ uint64_t __r; asm volatile ("rpcc %0" : "=r" (__r)); __r; })
#endif

/* Write hint: the 64-byte block containing p will be written soon and its
 * current contents need not be preserved. */
#define wh64(p) asm volatile("wh64 (%0)" : : "r"(p) : "memory")
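
/* MVI (MAX) instructions: use the builtins when GCC both provides them (3.3+)
 * and targets a CPU with the MAX extension; otherwise fall back to inline asm. */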
#if GNUC_PREREQ(3,3) && defined(__alpha_max__)
#define minub8 __builtin_alpha_minub8
#define minsb8 __builtin_alpha_minsb8
#define minuw4 __builtin_alpha_minuw4
#define minsw4 __builtin_alpha_minsw4
#define maxub8 __builtin_alpha_maxub8
#define maxsb8 __builtin_alpha_maxsb8
#define maxuw4 __builtin_alpha_maxuw4
#define maxsw4 __builtin_alpha_maxsw4
#define perr __builtin_alpha_perr
#define pklb __builtin_alpha_pklb
#define pkwb __builtin_alpha_pkwb
#define unpkbl __builtin_alpha_unpkbl
#define unpkbw __builtin_alpha_unpkbw
#else
/* Inline-asm fallback; ".arch ev6" makes the assembler accept the MVI
 * instructions even when the default target architecture lacks them. */
#define minub8(a, b) ({ uint64_t __r; asm (".arch ev6; minub8 %r1,%2,%0" : "=r" (__r) : "%rJ" (a), "rI" (b)); __r; })
#define minsb8(a, b) ({ uint64_t __r; asm (".arch ev6; minsb8 %r1,%2,%0" : "=r" (__r) : "%rJ" (a), "rI" (b)); __r; })
#define minuw4(a, b) ({ uint64_t __r; asm (".arch ev6; minuw4 %r1,%2,%0" : "=r" (__r) : "%rJ" (a), "rI" (b)); __r; })
#define minsw4(a, b) ({ uint64_t __r; asm (".arch ev6; minsw4 %r1,%2,%0" : "=r" (__r) : "%rJ" (a), "rI" (b)); __r; })
#define maxub8(a, b) ({ uint64_t __r; asm (".arch ev6; maxub8 %r1,%2,%0" : "=r" (__r) : "%rJ" (a), "rI" (b)); __r; })
#define maxsb8(a, b) ({ uint64_t __r; asm (".arch ev6; maxsb8 %r1,%2,%0" : "=r" (__r) : "%rJ" (a), "rI" (b)); __r; })
#define maxuw4(a, b) ({ uint64_t __r; asm (".arch ev6; maxuw4 %r1,%2,%0" : "=r" (__r) : "%rJ" (a), "rI" (b)); __r; })
#define maxsw4(a, b) ({ uint64_t __r; asm (".arch ev6; maxsw4 %r1,%2,%0" : "=r" (__r) : "%rJ" (a), "rI" (b)); __r; })
#define perr(a, b) ({ uint64_t __r; asm (".arch ev6; perr %r1,%r2,%0" : "=r" (__r) : "%rJ" (a), "rJ" (b)); __r; })
#define pklb(a) ({ uint64_t __r; asm (".arch ev6; pklb %r1,%0" : "=r" (__r) : "rJ" (a)); __r; })
#define pkwb(a) ({ uint64_t __r; asm (".arch ev6; pkwb %r1,%0" : "=r" (__r) : "rJ" (a)); __r; })
#define unpkbl(a) ({ uint64_t __r; asm (".arch ev6; unpkbl %r1,%0" : "=r" (__r) : "rJ" (a)); __r; })
#define unpkbw(a) ({ uint64_t __r; asm (".arch ev6; unpkbw %r1,%0" : "=r" (__r) : "rJ" (a)); __r; })
#endif

#elif defined(__DECC)           /* Digital/Compaq/HP "ccc" compiler */

#include <c_asm.h>
#define ldq_u(a) asm ("ldq_u %v0,0(%a0)", a)
#define uldq(a) (*(const __unaligned uint64_t *) (a))
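/* The remaining operations map directly onto DEC C's asm() intrinsic: %a0 and
 * %a1 name the arguments passed after the instruction string, and %v0 is the
 * value returned by the asm() expression. */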
#define cmpbge(a, b) asm ("cmpbge %a0,%a1,%v0", a, b)
#define extql(a, b) asm ("extql %a0,%a1,%v0", a, b)
#define extwl(a, b) asm ("extwl %a0,%a1,%v0", a, b)
#define extqh(a, b) asm ("extqh %a0,%a1,%v0", a, b)
#define zap(a, b) asm ("zap %a0,%a1,%v0", a, b)
#define zapnot(a, b) asm ("zapnot %a0,%a1,%v0", a, b)
#define amask(a) asm ("amask %a0,%v0", a)
#define implver() asm ("implver %v0")
#define rpcc() asm ("rpcc %v0")
#define minub8(a, b) asm ("minub8 %a0,%a1,%v0", a, b)
#define minsb8(a, b) asm ("minsb8 %a0,%a1,%v0", a, b)
#define minuw4(a, b) asm ("minuw4 %a0,%a1,%v0", a, b)
#define minsw4(a, b) asm ("minsw4 %a0,%a1,%v0", a, b)
#define maxub8(a, b) asm ("maxub8 %a0,%a1,%v0", a, b)
#define maxsb8(a, b) asm ("maxsb8 %a0,%a1,%v0", a, b)
#define maxuw4(a, b) asm ("maxuw4 %a0,%a1,%v0", a, b)
#define maxsw4(a, b) asm ("maxsw4 %a0,%a1,%v0", a, b)
#define perr(a, b) asm ("perr %a0,%a1,%v0", a, b)
#define pklb(a) asm ("pklb %a0,%v0", a)
#define pkwb(a) asm ("pkwb %a0,%v0", a)
#define unpkbl(a) asm ("unpkbl %a0,%v0", a)
#define unpkbw(a) asm ("unpkbw %a0,%v0", a)
#define wh64(a) asm ("wh64 %a0", a)

#else
#error "Unknown compiler!"
#endif