/*===---- bmiintrin.h - BMI intrinsics -------------------------------------===
 *
 * Permission is hereby granted, free of charge, to any person obtaining a copy
 * of this software and associated documentation files (the "Software"), to deal
 * in the Software without restriction, including without limitation the rights
 * to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
 * copies of the Software, and to permit persons to whom the Software is
 * furnished to do so, subject to the following conditions:
 *
 * The above copyright notice and this permission notice shall be included in
 * all copies or substantial portions of the Software.
 *
 * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
 * IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
 * FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
 * AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
 * LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
 * OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
 * THE SOFTWARE.
 *
 *===-----------------------------------------------------------------------===
 */

#if !defined __X86INTRIN_H && !defined __IMMINTRIN_H
#error "Never use <bmiintrin.h> directly; include <x86intrin.h> instead."
#endif

#ifndef __BMI__
# error "BMI instruction set not enabled"
#endif /* __BMI__ */

#ifndef __BMIINTRIN_H
#define __BMIINTRIN_H
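/*
 * The single-leading-underscore names below are the Intel-documented
 * intrinsics; each is a macro alias for the corresponding double-underscore
 * implementation further down.  _bextr_u32 is not a simple alias because its
 * argument form differs from __bextr_u32 (see the two BEXTR variants below).
 *
 * Illustrative use (assumes BMI is enabled, e.g. "clang -mbmi", and that
 * <x86intrin.h> is included; "mask" and "value" are placeholder operands):
 *
 *   unsigned lowest = _blsi_u32(mask);          // isolate lowest set bit
 *   unsigned index  = _tzcnt_u32(mask);         // count trailing zeros
 *   unsigned field  = _bextr_u32(value, 4, 8);  // extract bits [4, 12)
 */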
#define _tzcnt_u16(a) (__tzcnt_u16((a)))
#define _andn_u32(a, b) (__andn_u32((a), (b)))
/* _bextr_u32 != __bextr_u32 */
#define _blsi_u32(a) (__blsi_u32((a)))
#define _blsmsk_u32(a) (__blsmsk_u32((a)))
#define _blsr_u32(a) (__blsr_u32((a)))
#define _tzcnt_u32(a) (__tzcnt_u32((a)))
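/* TZCNT: count trailing zero bits of a 16-bit value; returns 16 when __X is 0. */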
static __inline__ unsigned short __attribute__((__always_inline__, __nodebug__))
__tzcnt_u16(unsigned short __X)
{
  return __X ? __builtin_ctzs(__X) : 16;
}
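/* ANDN: bitwise AND of __Y with the one's complement of __X. */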
static __inline__ unsigned int __attribute__((__always_inline__, __nodebug__))
__andn_u32(unsigned int __X, unsigned int __Y)
{
  return ~__X & __Y;
}
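/*
 * BEXTR extracts a contiguous bit field from __X.  In the AMD form the
 * control operand __Y packs the starting bit index in bits 7:0 and the
 * field length in bits 15:8.
 */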
/* AMD-specified, double-leading-underscore version of BEXTR */
static __inline__ unsigned int __attribute__((__always_inline__, __nodebug__))
__bextr_u32(unsigned int __X, unsigned int __Y)
{
  return __builtin_ia32_bextr_u32(__X, __Y);
}
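/* In the Intel form, __Y is the starting bit index and __Z is the field length. */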
/* Intel-specified, single-leading-underscore version of BEXTR */
static __inline__ unsigned int __attribute__((__always_inline__, __nodebug__))
_bextr_u32(unsigned int __X, unsigned int __Y, unsigned int __Z)
{
  return __builtin_ia32_bextr_u32 (__X, ((__Y & 0xff) | ((__Z & 0xff) << 8)));
}
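/* BLSI: isolate the lowest set bit of __X. */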
static __inline__ unsigned int __attribute__((__always_inline__, __nodebug__))
__blsi_u32(unsigned int __X)
{
  return __X & -__X;
}
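/* BLSMSK: produce a mask of all bits up to and including the lowest set bit. */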
static __inline__ unsigned int __attribute__((__always_inline__, __nodebug__))
__blsmsk_u32(unsigned int __X)
{
  return __X ^ (__X - 1);
}
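/* BLSR: clear (reset) the lowest set bit of __X. */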
static __inline__ unsigned int __attribute__((__always_inline__, __nodebug__))
__blsr_u32(unsigned int __X)
{
  return __X & (__X - 1);
}
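/* TZCNT: count trailing zero bits of a 32-bit value; returns 32 when __X is 0. */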
static __inline__ unsigned int __attribute__((__always_inline__, __nodebug__))
__tzcnt_u32(unsigned int __X)
{
  return __X ? __builtin_ctz(__X) : 32;
}
#ifdef __x86_64__
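/*
 * 64-bit counterparts of the intrinsics above; the semantics are the same,
 * with a zero input to __tzcnt_u64 yielding 64.
 */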
#define _andn_u64(a, b) (__andn_u64((a), (b)))
/* _bextr_u64 != __bextr_u64 */
#define _blsi_u64(a) (__blsi_u64((a)))
#define _blsmsk_u64(a) (__blsmsk_u64((a)))
#define _blsr_u64(a) (__blsr_u64((a)))
#define _tzcnt_u64(a) (__tzcnt_u64((a)))
static __inline__ unsigned long long __attribute__((__always_inline__, __nodebug__))
__andn_u64 (unsigned long long __X, unsigned long long __Y)
{
  return ~__X & __Y;
}

/* AMD-specified, double-leading-underscore version of BEXTR */
static __inline__ unsigned long long __attribute__((__always_inline__, __nodebug__))
__bextr_u64(unsigned long long __X, unsigned long long __Y)
{
  return __builtin_ia32_bextr_u64(__X, __Y);
}

/* Intel-specified, single-leading-underscore version of BEXTR */
static __inline__ unsigned long long __attribute__((__always_inline__, __nodebug__))
_bextr_u64(unsigned long long __X, unsigned int __Y, unsigned int __Z)
{
  return __builtin_ia32_bextr_u64 (__X, ((__Y & 0xff) | ((__Z & 0xff) << 8)));
}

static __inline__ unsigned long long __attribute__((__always_inline__, __nodebug__))
__blsi_u64(unsigned long long __X)
{
  return __X & -__X;
}

static __inline__ unsigned long long __attribute__((__always_inline__, __nodebug__))
__blsmsk_u64(unsigned long long __X)
{
  return __X ^ (__X - 1);
}

static __inline__ unsigned long long __attribute__((__always_inline__, __nodebug__))
__blsr_u64(unsigned long long __X)
{
  return __X & (__X - 1);
}

static __inline__ unsigned long long __attribute__((__always_inline__, __nodebug__))
__tzcnt_u64(unsigned long long __X)
{
  return __X ? __builtin_ctzll(__X) : 64;
}

#endif /* __x86_64__ */

#endif /* __BMIINTRIN_H */