/* ===-------- intrin.h ---------------------------------------------------===
 *
 * Part of the LLVM Project, under the Apache License v2.0 with LLVM Exceptions.
 * See https://llvm.org/LICENSE.txt for license information.
 * SPDX-License-Identifier: Apache-2.0 WITH LLVM-exception
 *
 *===-----------------------------------------------------------------------===
 */

/* Only include this if we're compiling for the Windows platform. */
#ifndef _MSC_VER
#include_next <intrin.h>
#else

#ifndef __INTRIN_H
#define __INTRIN_H

/* First include the standard intrinsics. */
#if defined(__i386__) || defined(__x86_64__)
#include <x86intrin.h>
#endif

#if defined(__arm__)
#include <armintr.h>
#endif

#if defined(__aarch64__)
#include <arm64intr.h>
#endif

/* For the definition of jmp_buf. */
#if __STDC_HOSTED__
#include <setjmp.h>
#endif

/* Define the default attributes for the functions in this file. */
#define __DEFAULT_FN_ATTRS __attribute__((__always_inline__, __nodebug__))

#if __x86_64__
#define __LPTRINT_TYPE__ __int64
#else
#define __LPTRINT_TYPE__ long
#endif

#ifdef __cplusplus
extern "C" {
#endif

#if defined(__MMX__)
/* And the random ones that aren't in those files. */
__m64 _m_from_float(float);
float _m_to_float(__m64);
#endif

/* Other assorted instruction intrinsics. */
void __addfsbyte(unsigned long, unsigned char);
void __addfsdword(unsigned long, unsigned long);
void __addfsword(unsigned long, unsigned short);
void __code_seg(const char *);
void __cpuid(int[4], int);
void __cpuidex(int[4], int, int);
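/* Example: reading the CPU vendor string with __cpuid (a minimal sketch,
 * assuming an x86 target). Leaf 0 returns the vendor id in EBX:EDX:ECX, and
 * __cpuid stores EAX, EBX, ECX, EDX into the four array slots in that order:
 *
 *   int __info[4];
 *   char __vendor[13];
 *   __cpuid(__info, 0);
 *   __builtin_memcpy(__vendor + 0, &__info[1], 4);  // EBX
 *   __builtin_memcpy(__vendor + 4, &__info[3], 4);  // EDX
 *   __builtin_memcpy(__vendor + 8, &__info[2], 4);  // ECX
 *   __vendor[12] = '\0';                            // e.g. "GenuineIntel"
 */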
__int64 __emul(int, int);
unsigned __int64 __emulu(unsigned int, unsigned int);
unsigned int __getcallerseflags(void);
void __halt(void);
unsigned char __inbyte(unsigned short);
void __inbytestring(unsigned short, unsigned char *, unsigned long);
void __incfsbyte(unsigned long);
void __incfsdword(unsigned long);
void __incfsword(unsigned long);
unsigned long __indword(unsigned short);
void __indwordstring(unsigned short, unsigned long *, unsigned long);
void __int2c(void);
void __invlpg(void *);
unsigned short __inword(unsigned short);
void __inwordstring(unsigned short, unsigned short *, unsigned long);
void __lidt(void *);
unsigned __int64 __ll_lshift(unsigned __int64, int);
__int64 __ll_rshift(__int64, int);
void __movsb(unsigned char *, unsigned char const *, size_t);
void __movsd(unsigned long *, unsigned long const *, size_t);
void __movsw(unsigned short *, unsigned short const *, size_t);
void __nop(void);
void __nvreg_restore_fence(void);
void __nvreg_save_fence(void);
void __outbyte(unsigned short, unsigned char);
void __outbytestring(unsigned short, unsigned char *, unsigned long);
void __outdword(unsigned short, unsigned long);
void __outdwordstring(unsigned short, unsigned long *, unsigned long);
void __outword(unsigned short, unsigned short);
void __outwordstring(unsigned short, unsigned short *, unsigned long);
unsigned long __readcr0(void);
unsigned long __readcr2(void);
unsigned __LPTRINT_TYPE__ __readcr3(void);
unsigned long __readcr4(void);
unsigned long __readcr8(void);
unsigned int __readdr(unsigned int);
#ifdef __i386__
unsigned char __readfsbyte(unsigned long);
unsigned short __readfsword(unsigned long);
unsigned long __readfsdword(unsigned long);
unsigned __int64 __readfsqword(unsigned long);
#endif
unsigned __int64 __readmsr(unsigned long);
unsigned __int64 __readpmc(unsigned long);
unsigned long __segmentlimit(unsigned long);
void __sidt(void *);
void __stosb(unsigned char *, unsigned char, size_t);
void __stosd(unsigned long *, unsigned long, size_t);
void __stosw(unsigned short *, unsigned short, size_t);
void __svm_clgi(void);
void __svm_invlpga(void *, int);
void __svm_skinit(int);
void __svm_stgi(void);
void __svm_vmload(size_t);
void __svm_vmrun(size_t);
void __svm_vmsave(size_t);
void __ud2(void);
unsigned __int64 __ull_rshift(unsigned __int64, int);
void __vmx_off(void);
void __vmx_vmptrst(unsigned __int64 *);
void __wbinvd(void);
void __writecr0(unsigned int);
void __writecr3(unsigned __INTPTR_TYPE__);
void __writecr4(unsigned int);
void __writecr8(unsigned int);
void __writedr(unsigned int, unsigned int);
void __writefsbyte(unsigned long, unsigned char);
void __writefsdword(unsigned long, unsigned long);
void __writefsqword(unsigned long, unsigned __int64);
void __writefsword(unsigned long, unsigned short);
void __writemsr(unsigned long, unsigned __int64);
void *_AddressOfReturnAddress(void);
unsigned char _BitScanForward(unsigned long *_Index, unsigned long _Mask);
unsigned char _BitScanReverse(unsigned long *_Index, unsigned long _Mask);
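/* Example: finding the lowest set bit with _BitScanForward (a sketch). The
 * return value is nonzero only if _Mask had a bit set, so a zero mask must
 * be rejected before _Index is used:
 *
 *   unsigned long __idx;
 *   if (_BitScanForward(&__idx, 0x50ul))
 *     ;  // __idx == 4, the position of the lowest set bit
 */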
unsigned char _bittest(long const *, long);
unsigned char _bittestandcomplement(long *, long);
unsigned char _bittestandreset(long *, long);
unsigned char _bittestandset(long *, long);
void __cdecl _disable(void);
void __cdecl _enable(void);
long _InterlockedAddLargeStatistic(__int64 volatile *_Addend, long _Value);
unsigned char _interlockedbittestandreset(long volatile *, long);
unsigned char _interlockedbittestandset(long volatile *, long);
void *_InterlockedCompareExchangePointer_HLEAcquire(void *volatile *, void *,
                                                    void *);
void *_InterlockedCompareExchangePointer_HLERelease(void *volatile *, void *,
                                                    void *);
long _InterlockedExchangeAdd_HLEAcquire(long volatile *, long);
long _InterlockedExchangeAdd_HLERelease(long volatile *, long);
__int64 _InterlockedExchangeAdd64_HLEAcquire(__int64 volatile *, __int64);
__int64 _InterlockedExchangeAdd64_HLERelease(__int64 volatile *, __int64);
void _ReadBarrier(void);
void _ReadWriteBarrier(void);
unsigned int _rorx_u32(unsigned int, const unsigned int);
int _sarx_i32(int, unsigned int);
#if __STDC_HOSTED__
int __cdecl _setjmp(jmp_buf);
#endif
unsigned int _shlx_u32(unsigned int, unsigned int);
unsigned int _shrx_u32(unsigned int, unsigned int);
void _Store_HLERelease(long volatile *, long);
void _Store64_HLERelease(__int64 volatile *, __int64);
void _StorePointer_HLERelease(void *volatile *, void *);
void _WriteBarrier(void);
unsigned __int32 xbegin(void);
void _xend(void);
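/* Example: a minimal transactional-memory sketch built on xbegin/_xend,
 * assuming RTM hardware support and the _XBEGIN_STARTED constant (~0u) from
 * <immintrin.h>; __shared_counter is a hypothetical shared variable. On
 * abort, control returns from xbegin with a status code:
 *
 *   if (xbegin() == _XBEGIN_STARTED) {
 *     ++__shared_counter;  // runs transactionally
 *     _xend();
 *   } else {
 *     // transaction aborted: fall back to a locked path
 *   }
 */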

/* These additional intrinsics are turned on in x64/amd64/x86_64 mode. */
#ifdef __x86_64__
void __addgsbyte(unsigned long, unsigned char);
void __addgsdword(unsigned long, unsigned long);
void __addgsqword(unsigned long, unsigned __int64);
void __addgsword(unsigned long, unsigned short);
void __faststorefence(void);
void __incgsbyte(unsigned long);
void __incgsdword(unsigned long);
void __incgsqword(unsigned long);
void __incgsword(unsigned long);
void __movsq(unsigned long long *, unsigned long long const *, size_t);
unsigned char __readgsbyte(unsigned long);
unsigned long __readgsdword(unsigned long);
unsigned __int64 __readgsqword(unsigned long);
unsigned short __readgsword(unsigned long);
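/* Example: on x64 Windows, GS points at the current thread's TEB, so (a
 * sketch relying on the documented TEB layout) the TEB self-pointer at
 * offset 0x30 can be read directly:
 *
 *   void *__teb = (void *)__readgsqword(0x30);  // what NtCurrentTeb() does
 */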
unsigned __int64 __shiftleft128(unsigned __int64 _LowPart,
                                unsigned __int64 _HighPart,
                                unsigned char _Shift);
unsigned __int64 __shiftright128(unsigned __int64 _LowPart,
                                 unsigned __int64 _HighPart,
                                 unsigned char _Shift);
void __stosq(unsigned __int64 *, unsigned __int64, size_t);
unsigned char __vmx_on(unsigned __int64 *);
unsigned char __vmx_vmclear(unsigned __int64 *);
unsigned char __vmx_vmlaunch(void);
unsigned char __vmx_vmptrld(unsigned __int64 *);
unsigned char __vmx_vmread(size_t, size_t *);
unsigned char __vmx_vmresume(void);
unsigned char __vmx_vmwrite(size_t, size_t);
void __writegsbyte(unsigned long, unsigned char);
void __writegsdword(unsigned long, unsigned long);
void __writegsqword(unsigned long, unsigned __int64);
void __writegsword(unsigned long, unsigned short);
unsigned char _bittest64(__int64 const *, __int64);
unsigned char _bittestandcomplement64(__int64 *, __int64);
unsigned char _bittestandreset64(__int64 *, __int64);
unsigned char _bittestandset64(__int64 *, __int64);
long _InterlockedAnd_np(long volatile *_Value, long _Mask);
short _InterlockedAnd16_np(short volatile *_Value, short _Mask);
__int64 _InterlockedAnd64_np(__int64 volatile *_Value, __int64 _Mask);
char _InterlockedAnd8_np(char volatile *_Value, char _Mask);
unsigned char _interlockedbittestandreset64(__int64 volatile *, __int64);
unsigned char _interlockedbittestandset64(__int64 volatile *, __int64);
long _InterlockedCompareExchange_np(long volatile *_Destination, long _Exchange,
                                    long _Comparand);
unsigned char _InterlockedCompareExchange128_np(__int64 volatile *_Destination,
                                                __int64 _ExchangeHigh,
                                                __int64 _ExchangeLow,
                                                __int64 *_ComparandResult);
short _InterlockedCompareExchange16_np(short volatile *_Destination,
                                       short _Exchange, short _Comparand);
__int64 _InterlockedCompareExchange64_np(__int64 volatile *_Destination,
                                         __int64 _Exchange, __int64 _Comparand);
void *_InterlockedCompareExchangePointer_np(void *volatile *_Destination,
                                            void *_Exchange, void *_Comparand);
long _InterlockedOr_np(long volatile *_Value, long _Mask);
short _InterlockedOr16_np(short volatile *_Value, short _Mask);
__int64 _InterlockedOr64_np(__int64 volatile *_Value, __int64 _Mask);
char _InterlockedOr8_np(char volatile *_Value, char _Mask);
long _InterlockedXor_np(long volatile *_Value, long _Mask);
short _InterlockedXor16_np(short volatile *_Value, short _Mask);
__int64 _InterlockedXor64_np(__int64 volatile *_Value, __int64 _Mask);
char _InterlockedXor8_np(char volatile *_Value, char _Mask);
unsigned __int64 _rorx_u64(unsigned __int64, const unsigned int);
__int64 _sarx_i64(__int64, unsigned int);
unsigned __int64 _shlx_u64(unsigned __int64, unsigned int);
unsigned __int64 _shrx_u64(unsigned __int64, unsigned int);
__int64 __mulh(__int64, __int64);
unsigned __int64 __umulh(unsigned __int64, unsigned __int64);
__int64 _mul128(__int64, __int64, __int64*);
unsigned __int64 _umul128(unsigned __int64,
                          unsigned __int64,
                          unsigned __int64*);
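/* Example: a full 64x64 -> 128-bit unsigned multiply with _umul128 (a
 * sketch). The low 64 bits are returned and the high 64 bits are stored
 * through the out-parameter:
 *
 *   unsigned __int64 __hi;
 *   unsigned __int64 __lo = _umul128(0x100000000ull, 0x100000000ull, &__hi);
 *   // __lo == 0, __hi == 1: the product is 2^64
 */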

#endif /* __x86_64__ */

#if defined(__x86_64__) || defined(__arm__) || defined(__aarch64__)

unsigned char _BitScanForward64(unsigned long *_Index, unsigned __int64 _Mask);
unsigned char _BitScanReverse64(unsigned long *_Index, unsigned __int64 _Mask);

#endif

#if defined(__i386__) || defined(__x86_64__) || defined(__arm__) || defined(__aarch64__)
__int64 _InterlockedDecrement64(__int64 volatile *_Addend);
__int64 _InterlockedExchange64(__int64 volatile *_Target, __int64 _Value);
__int64 _InterlockedExchangeAdd64(__int64 volatile *_Addend, __int64 _Value);
__int64 _InterlockedExchangeSub64(__int64 volatile *_Subend, __int64 _Value);
__int64 _InterlockedIncrement64(__int64 volatile *_Addend);
__int64 _InterlockedOr64(__int64 volatile *_Value, __int64 _Mask);
__int64 _InterlockedXor64(__int64 volatile *_Value, __int64 _Mask);
__int64 _InterlockedAnd64(__int64 volatile *_Value, __int64 _Mask);

#endif

/*----------------------------------------------------------------------------*\
|* Interlocked Exchange Add
\*----------------------------------------------------------------------------*/
#if defined(__arm__) || defined(__aarch64__)
char _InterlockedExchangeAdd8_acq(char volatile *_Addend, char _Value);
char _InterlockedExchangeAdd8_nf(char volatile *_Addend, char _Value);
char _InterlockedExchangeAdd8_rel(char volatile *_Addend, char _Value);
short _InterlockedExchangeAdd16_acq(short volatile *_Addend, short _Value);
short _InterlockedExchangeAdd16_nf(short volatile *_Addend, short _Value);
short _InterlockedExchangeAdd16_rel(short volatile *_Addend, short _Value);
long _InterlockedExchangeAdd_acq(long volatile *_Addend, long _Value);
long _InterlockedExchangeAdd_nf(long volatile *_Addend, long _Value);
long _InterlockedExchangeAdd_rel(long volatile *_Addend, long _Value);
__int64 _InterlockedExchangeAdd64_acq(__int64 volatile *_Addend, __int64 _Value);
__int64 _InterlockedExchangeAdd64_nf(__int64 volatile *_Addend, __int64 _Value);
__int64 _InterlockedExchangeAdd64_rel(__int64 volatile *_Addend, __int64 _Value);
#endif
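/* The _acq/_rel/_nf suffixes on the declarations above and below select
 * acquire, release, or no-fence memory ordering on ARM; the unsuffixed forms
 * are sequentially consistent. For example (a sketch; __lock is a
 * hypothetical flag), a spinlock pairs an acquire exchange with a release
 * store:
 *
 *   static long volatile __lock;
 *   while (_InterlockedExchange_acq(&__lock, 1))
 *     ;                                    // spin until we observe 0
 *   // ... critical section ...
 *   _InterlockedExchange_rel(&__lock, 0);  // publish writes and unlock
 */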
/*----------------------------------------------------------------------------*\
|* Interlocked Increment
\*----------------------------------------------------------------------------*/
#if defined(__arm__) || defined(__aarch64__)
short _InterlockedIncrement16_acq(short volatile *_Value);
short _InterlockedIncrement16_nf(short volatile *_Value);
short _InterlockedIncrement16_rel(short volatile *_Value);
long _InterlockedIncrement_acq(long volatile *_Value);
long _InterlockedIncrement_nf(long volatile *_Value);
long _InterlockedIncrement_rel(long volatile *_Value);
__int64 _InterlockedIncrement64_acq(__int64 volatile *_Value);
__int64 _InterlockedIncrement64_nf(__int64 volatile *_Value);
__int64 _InterlockedIncrement64_rel(__int64 volatile *_Value);
#endif
/*----------------------------------------------------------------------------*\
|* Interlocked Decrement
\*----------------------------------------------------------------------------*/
#if defined(__arm__) || defined(__aarch64__)
short _InterlockedDecrement16_acq(short volatile *_Value);
short _InterlockedDecrement16_nf(short volatile *_Value);
short _InterlockedDecrement16_rel(short volatile *_Value);
long _InterlockedDecrement_acq(long volatile *_Value);
long _InterlockedDecrement_nf(long volatile *_Value);
long _InterlockedDecrement_rel(long volatile *_Value);
__int64 _InterlockedDecrement64_acq(__int64 volatile *_Value);
__int64 _InterlockedDecrement64_nf(__int64 volatile *_Value);
__int64 _InterlockedDecrement64_rel(__int64 volatile *_Value);
#endif
/*----------------------------------------------------------------------------*\
|* Interlocked And
\*----------------------------------------------------------------------------*/
#if defined(__arm__) || defined(__aarch64__)
char _InterlockedAnd8_acq(char volatile *_Value, char _Mask);
char _InterlockedAnd8_nf(char volatile *_Value, char _Mask);
char _InterlockedAnd8_rel(char volatile *_Value, char _Mask);
short _InterlockedAnd16_acq(short volatile *_Value, short _Mask);
short _InterlockedAnd16_nf(short volatile *_Value, short _Mask);
short _InterlockedAnd16_rel(short volatile *_Value, short _Mask);
long _InterlockedAnd_acq(long volatile *_Value, long _Mask);
long _InterlockedAnd_nf(long volatile *_Value, long _Mask);
long _InterlockedAnd_rel(long volatile *_Value, long _Mask);
__int64 _InterlockedAnd64_acq(__int64 volatile *_Value, __int64 _Mask);
__int64 _InterlockedAnd64_nf(__int64 volatile *_Value, __int64 _Mask);
__int64 _InterlockedAnd64_rel(__int64 volatile *_Value, __int64 _Mask);
#endif
/*----------------------------------------------------------------------------*\
|* Bit Counting and Testing
\*----------------------------------------------------------------------------*/
#if defined(__arm__) || defined(__aarch64__)
unsigned char _interlockedbittestandset_acq(long volatile *_BitBase,
                                            long _BitPos);
unsigned char _interlockedbittestandset_nf(long volatile *_BitBase,
                                           long _BitPos);
unsigned char _interlockedbittestandset_rel(long volatile *_BitBase,
                                            long _BitPos);
unsigned char _interlockedbittestandreset_acq(long volatile *_BitBase,
                                              long _BitPos);
unsigned char _interlockedbittestandreset_nf(long volatile *_BitBase,
                                             long _BitPos);
unsigned char _interlockedbittestandreset_rel(long volatile *_BitBase,
                                              long _BitPos);
#endif
/*----------------------------------------------------------------------------*\
|* Interlocked Or
\*----------------------------------------------------------------------------*/
#if defined(__arm__) || defined(__aarch64__)
char _InterlockedOr8_acq(char volatile *_Value, char _Mask);
char _InterlockedOr8_nf(char volatile *_Value, char _Mask);
char _InterlockedOr8_rel(char volatile *_Value, char _Mask);
short _InterlockedOr16_acq(short volatile *_Value, short _Mask);
short _InterlockedOr16_nf(short volatile *_Value, short _Mask);
short _InterlockedOr16_rel(short volatile *_Value, short _Mask);
long _InterlockedOr_acq(long volatile *_Value, long _Mask);
long _InterlockedOr_nf(long volatile *_Value, long _Mask);
long _InterlockedOr_rel(long volatile *_Value, long _Mask);
__int64 _InterlockedOr64_acq(__int64 volatile *_Value, __int64 _Mask);
__int64 _InterlockedOr64_nf(__int64 volatile *_Value, __int64 _Mask);
__int64 _InterlockedOr64_rel(__int64 volatile *_Value, __int64 _Mask);
#endif
/*----------------------------------------------------------------------------*\
|* Interlocked Xor
\*----------------------------------------------------------------------------*/
#if defined(__arm__) || defined(__aarch64__)
char _InterlockedXor8_acq(char volatile *_Value, char _Mask);
char _InterlockedXor8_nf(char volatile *_Value, char _Mask);
char _InterlockedXor8_rel(char volatile *_Value, char _Mask);
short _InterlockedXor16_acq(short volatile *_Value, short _Mask);
short _InterlockedXor16_nf(short volatile *_Value, short _Mask);
short _InterlockedXor16_rel(short volatile *_Value, short _Mask);
long _InterlockedXor_acq(long volatile *_Value, long _Mask);
long _InterlockedXor_nf(long volatile *_Value, long _Mask);
long _InterlockedXor_rel(long volatile *_Value, long _Mask);
__int64 _InterlockedXor64_acq(__int64 volatile *_Value, __int64 _Mask);
__int64 _InterlockedXor64_nf(__int64 volatile *_Value, __int64 _Mask);
__int64 _InterlockedXor64_rel(__int64 volatile *_Value, __int64 _Mask);
#endif
/*----------------------------------------------------------------------------*\
|* Interlocked Exchange
\*----------------------------------------------------------------------------*/
#if defined(__arm__) || defined(__aarch64__)
char _InterlockedExchange8_acq(char volatile *_Target, char _Value);
char _InterlockedExchange8_nf(char volatile *_Target, char _Value);
char _InterlockedExchange8_rel(char volatile *_Target, char _Value);
short _InterlockedExchange16_acq(short volatile *_Target, short _Value);
short _InterlockedExchange16_nf(short volatile *_Target, short _Value);
short _InterlockedExchange16_rel(short volatile *_Target, short _Value);
long _InterlockedExchange_acq(long volatile *_Target, long _Value);
long _InterlockedExchange_nf(long volatile *_Target, long _Value);
long _InterlockedExchange_rel(long volatile *_Target, long _Value);
__int64 _InterlockedExchange64_acq(__int64 volatile *_Target, __int64 _Value);
__int64 _InterlockedExchange64_nf(__int64 volatile *_Target, __int64 _Value);
__int64 _InterlockedExchange64_rel(__int64 volatile *_Target, __int64 _Value);
#endif
/*----------------------------------------------------------------------------*\
|* Interlocked Compare Exchange
\*----------------------------------------------------------------------------*/
#if defined(__arm__) || defined(__aarch64__)
char _InterlockedCompareExchange8_acq(char volatile *_Destination,
                                      char _Exchange, char _Comparand);
char _InterlockedCompareExchange8_nf(char volatile *_Destination,
                                     char _Exchange, char _Comparand);
char _InterlockedCompareExchange8_rel(char volatile *_Destination,
                                      char _Exchange, char _Comparand);
short _InterlockedCompareExchange16_acq(short volatile *_Destination,
                                        short _Exchange, short _Comparand);
short _InterlockedCompareExchange16_nf(short volatile *_Destination,
                                       short _Exchange, short _Comparand);
short _InterlockedCompareExchange16_rel(short volatile *_Destination,
                                        short _Exchange, short _Comparand);
long _InterlockedCompareExchange_acq(long volatile *_Destination,
                                     long _Exchange, long _Comparand);
long _InterlockedCompareExchange_nf(long volatile *_Destination,
                                    long _Exchange, long _Comparand);
long _InterlockedCompareExchange_rel(long volatile *_Destination,
                                     long _Exchange, long _Comparand);
__int64 _InterlockedCompareExchange64_acq(__int64 volatile *_Destination,
                                          __int64 _Exchange, __int64 _Comparand);
__int64 _InterlockedCompareExchange64_nf(__int64 volatile *_Destination,
                                         __int64 _Exchange, __int64 _Comparand);
__int64 _InterlockedCompareExchange64_rel(__int64 volatile *_Destination,
                                          __int64 _Exchange, __int64 _Comparand);
#endif
#if defined(__x86_64__) || defined(__aarch64__)
unsigned char _InterlockedCompareExchange128(__int64 volatile *_Destination,
                                             __int64 _ExchangeHigh,
                                             __int64 _ExchangeLow,
                                             __int64 *_ComparandResult);
#endif
#if defined(__aarch64__)
unsigned char _InterlockedCompareExchange128_acq(__int64 volatile *_Destination,
                                                 __int64 _ExchangeHigh,
                                                 __int64 _ExchangeLow,
                                                 __int64 *_ComparandResult);
unsigned char _InterlockedCompareExchange128_nf(__int64 volatile *_Destination,
                                                __int64 _ExchangeHigh,
                                                __int64 _ExchangeLow,
                                                __int64 *_ComparandResult);
unsigned char _InterlockedCompareExchange128_rel(__int64 volatile *_Destination,
                                                 __int64 _ExchangeHigh,
                                                 __int64 _ExchangeLow,
                                                 __int64 *_ComparandResult);
#endif
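/* Example: a 128-bit compare-and-swap loop (a sketch; __new_low/__new_high
 * are hypothetical). _ComparandResult holds the expected value on input
 * and, on failure, is updated with the value actually found, so the loop
 * re-reads the destination for free; the return value is 1 on success:
 *
 *   __declspec(align(16)) static __int64 __slot[2];
 *   __int64 __expected[2] = {__slot[0], __slot[1]};  // {low, high}
 *   while (!_InterlockedCompareExchange128(__slot, __new_high, __new_low,
 *                                          __expected))
 *     ;  // __expected now holds the current contents; retry
 */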

/*----------------------------------------------------------------------------*\
|* movs, stos
\*----------------------------------------------------------------------------*/
#if defined(__i386__) || defined(__x86_64__)
static __inline__ void __DEFAULT_FN_ATTRS __movsb(unsigned char *__dst,
                                                  unsigned char const *__src,
                                                  size_t __n) {
#if defined(__x86_64__)
  __asm__ __volatile__("rep movsb"
                       : "+D"(__dst), "+S"(__src), "+c"(__n)
                       :
                       : "memory");
#else
  /* On i386 the compiler may reserve %esi (e.g. as a frame or base
     register), so take __src in any register and swap it into %esi only
     around the string instruction. */
  __asm__ __volatile__("xchg {%%esi, %1|%1, esi}\n"
                       "rep movsb\n"
                       "xchg {%%esi, %1|%1, esi}"
                       : "+D"(__dst), "+r"(__src), "+c"(__n)
                       :
                       : "memory");
#endif
}
static __inline__ void __DEFAULT_FN_ATTRS __movsd(unsigned long *__dst,
                                                  unsigned long const *__src,
                                                  size_t __n) {
#if defined(__x86_64__)
  __asm__ __volatile__("rep movs{l|d}"
                       : "+D"(__dst), "+S"(__src), "+c"(__n)
                       :
                       : "memory");
#else
  __asm__ __volatile__("xchg {%%esi, %1|%1, esi}\n"
                       "rep movs{l|d}\n"
                       "xchg {%%esi, %1|%1, esi}"
                       : "+D"(__dst), "+r"(__src), "+c"(__n)
                       :
                       : "memory");
#endif
}
static __inline__ void __DEFAULT_FN_ATTRS __movsw(unsigned short *__dst,
                                                  unsigned short const *__src,
                                                  size_t __n) {
#if defined(__x86_64__)
  __asm__ __volatile__("rep movsw"
                       : "+D"(__dst), "+S"(__src), "+c"(__n)
                       :
                       : "memory");
#else
  __asm__ __volatile__("xchg {%%esi, %1|%1, esi}\n"
                       "rep movsw\n"
                       "xchg {%%esi, %1|%1, esi}"
                       : "+D"(__dst), "+r"(__src), "+c"(__n)
                       :
                       : "memory");
#endif
}
static __inline__ void __DEFAULT_FN_ATTRS __stosd(unsigned long *__dst,
                                                  unsigned long __x,
                                                  size_t __n) {
  __asm__ __volatile__("rep stos{l|d}"
                       : "+D"(__dst), "+c"(__n)
                       : "a"(__x)
                       : "memory");
}
static __inline__ void __DEFAULT_FN_ATTRS __stosw(unsigned short *__dst,
                                                  unsigned short __x,
                                                  size_t __n) {
  __asm__ __volatile__("rep stosw"
                       : "+D"(__dst), "+c"(__n)
                       : "a"(__x)
                       : "memory");
}
#endif
#ifdef __x86_64__
static __inline__ void __DEFAULT_FN_ATTRS __movsq(
    unsigned long long *__dst, unsigned long long const *__src, size_t __n) {
  __asm__ __volatile__("rep movsq"
                       : "+D"(__dst), "+S"(__src), "+c"(__n)
                       :
                       : "memory");
}
static __inline__ void __DEFAULT_FN_ATTRS __stosq(unsigned __int64 *__dst,
                                                  unsigned __int64 __x,
                                                  size_t __n) {
  __asm__ __volatile__("rep stosq"
                       : "+D"(__dst), "+c"(__n)
                       : "a"(__x)
                       : "memory");
}
#endif
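/* Example: the movs/stos helpers count elements, not bytes (a sketch):
 *
 *   unsigned long long __a[4], __b[4];
 *   __stosq(__a, 0x1122334455667788ull, 4);  // fill 4 qwords (32 bytes)
 *   __movsq(__b, __a, 4);                    // copy 4 qwords, like memcpy
 */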

/*----------------------------------------------------------------------------*\
|* Misc
\*----------------------------------------------------------------------------*/
#if defined(__i386__) || defined(__x86_64__)
static __inline__ void __DEFAULT_FN_ATTRS __halt(void) {
  __asm__ volatile("hlt");
}
#endif

#if defined(__i386__) || defined(__x86_64__) || defined(__aarch64__)
static __inline__ void __DEFAULT_FN_ATTRS __nop(void) {
  __asm__ volatile("nop");
}
#endif

/*----------------------------------------------------------------------------*\
|* MS AArch64 specific
\*----------------------------------------------------------------------------*/
#if defined(__aarch64__)
unsigned __int64 __getReg(int);
long _InterlockedAdd(long volatile *Addend, long Value);
__int64 _ReadStatusReg(int);
void _WriteStatusReg(int, __int64);

unsigned short __cdecl _byteswap_ushort(unsigned short val);
unsigned long __cdecl _byteswap_ulong(unsigned long val);
unsigned __int64 __cdecl _byteswap_uint64(unsigned __int64 val);
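/* Example: the byteswap family reverses byte order, e.g. when converting
 * between big- and little-endian data (a sketch):
 *
 *   unsigned long __n = _byteswap_ulong(0x11223344ul);  // 0x44332211
 */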

__int64 __mulh(__int64 __a, __int64 __b);
unsigned __int64 __umulh(unsigned __int64 __a, unsigned __int64 __b);

void __break(int);

void __writex18byte(unsigned long offset, unsigned char data);
void __writex18word(unsigned long offset, unsigned short data);
void __writex18dword(unsigned long offset, unsigned long data);
void __writex18qword(unsigned long offset, unsigned __int64 data);

unsigned char __readx18byte(unsigned long offset);
unsigned short __readx18word(unsigned long offset);
unsigned long __readx18dword(unsigned long offset);
unsigned __int64 __readx18qword(unsigned long offset);
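/* Example: on ARM64 Windows, x18 holds the TEB pointer, so the __readx18*
 * accessors read thread-local system data at a given offset (a sketch
 * relying on the documented NT_TIB layout):
 *
 *   unsigned __int64 __stack_base = __readx18qword(0x8);  // NT_TIB StackBase
 */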
#endif

/*----------------------------------------------------------------------------*\
|* Privileged intrinsics
\*----------------------------------------------------------------------------*/
#if defined(__i386__) || defined(__x86_64__)
static __inline__ unsigned __int64 __DEFAULT_FN_ATTRS
__readmsr(unsigned long __register) {
  // Loads the contents of a 64-bit model specific register (MSR) specified in
  // the ECX register into registers EDX:EAX. The EDX register is loaded with
  // the high-order 32 bits of the MSR and the EAX register is loaded with the
  // low-order 32 bits. If less than 64 bits are implemented in the MSR being
  // read, the values returned to EDX:EAX in unimplemented bit locations are
  // undefined.
  unsigned long __edx;
  unsigned long __eax;
  __asm__ ("rdmsr" : "=d"(__edx), "=a"(__eax) : "c"(__register));
  return (((unsigned __int64)__edx) << 32) | (unsigned __int64)__eax;
}
#endif
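/* Example: rdmsr/wrmsr execute only at ring 0, so this is for kernel-mode
 * code (a sketch):
 *
 *   unsigned __int64 __tsc = __readmsr(0x10);  // IA32_TIME_STAMP_COUNTER
 */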

static __inline__ unsigned __LPTRINT_TYPE__ __DEFAULT_FN_ATTRS __readcr3(void) {
  unsigned __LPTRINT_TYPE__ __cr3_val;
  __asm__ __volatile__("mov {%%cr3, %0|%0, cr3}"
                       : "=r"(__cr3_val)
                       :
                       : "memory");
  return __cr3_val;
}

static __inline__ void __DEFAULT_FN_ATTRS
__writecr3(unsigned __INTPTR_TYPE__ __cr3_val) {
  __asm__ ("mov {%0, %%cr3|cr3, %0}" : : "r"(__cr3_val) : "memory");
}
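/* Example: rewriting CR3 with its current value flushes non-global TLB
 * entries, a classic paging-structure invalidation (a sketch; ring 0 only):
 *
 *   __writecr3(__readcr3());
 */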

#ifdef __cplusplus
}
#endif

#undef __LPTRINT_TYPE__

#undef __DEFAULT_FN_ATTRS

#endif /* __INTRIN_H */
#endif /* _MSC_VER */