// BitMagic-C++ — bmdef.h
// (Doxygen page header: "Go to the documentation of this file.")
/*
Copyright(c) 2002-2017 Anatoliy Kuznetsov(anatoliy_kuznetsov at yahoo.com)

Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at

    http://www.apache.org/licenses/LICENSE-2.0

Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.

For more information please visit: http://bitmagic.io
*/

/*! \file bmdef.h
    \brief Definitions(internal)
*/
// Standard headers needed by the configuration logic below
// (ULONG_MAX from <climits>, fixed-width types from <stdint.h>).
#include <climits>
#include <stdint.h>
// Incorporate appropriate tuneups when the NCBI C++ Toolkit's core
// headers have been included (detected via NCBI_ASSERT).
//
#ifdef NCBI_ASSERT
# define BM_ASSERT _ASSERT

# ifdef HAVE_RESTRICT_CXX
#  define BM_HASRESTRICT
#  define BMRESTRICT NCBI_RESTRICT
# endif

  // Forceinline only where the toolkit provides it and GCC is either new
  // enough or optimizing (older GCC mishandles forced inlining at -O0).
# if defined(NCBI_FORCEINLINE) && \
    ( !defined(NCBI_COMPILER_GCC) || NCBI_COMPILER_VERSION >= 400 || \
      defined(__OPTIMIZE__))
#  define BM_HASFORCEINLINE
#  define BMFORCEINLINE NCBI_FORCEINLINE
# endif

  // Map the toolkit's SSE level onto BitMagic's SIMD switches.
# ifdef NCBI_SSE
#  if NCBI_SSE >= 20
#   define BMSSE2OPT 1
#  endif
   // NOTE(review): the original also re-defined BMSSE2OPT for NCBI_SSE >= 40;
   // that branch was redundant (subsumed by >= 20) and has been removed.
#  if NCBI_SSE >= 42
#   define BMSSE42OPT 1
#  endif
# endif
#endif

// macro to define/undefine unaligned memory access (x86, PowerPC, ARM)
// Define BM_FORBID_UNALIGNED_ACCESS to disable it on ALL platforms.
// NOTE(review): the original only honored BM_FORBID_UNALIGNED_ACCESS for the
// _M_MPPC case (parenthesization bug) and listed _M_IX86/_M_AMD64 twice;
// the forbid-switch now applies uniformly and duplicates are removed.
//
#if (defined(__i386) || defined(__x86_64) || defined(__ppc__) || \
     defined(__ppc64__) || defined(_M_IX86) || defined(_M_AMD64) || \
     defined(_M_X64) || defined(_M_ARM) || defined(_M_ARM64) || \
     defined(__arm__) || defined(__aarch64__) || defined(_M_MPPC)) && \
    !defined(BM_FORBID_UNALIGNED_ACCESS)
#define BM_UNALIGNED_ACCESS_OK 1
#endif

// Detect x86 targets (or SSE builds, which imply x86).
// NOTE(review): the original condition listed defined(_M_AMD64) twice;
// the duplicate has been removed (no behavioral change).
#if defined(_M_IX86) || defined(_M_AMD64) || defined(_M_X64) || \
    defined(__i386) || defined(__x86_64) || \
    defined(BMSSE2OPT) || defined(BMSSE42OPT)
#define BM_x86
#endif

// cxx11 features
// BMNOEXCEPT/BMNOEXCEPT2 expand to "noexcept" only when the compiler
// supports C++11 (MSVC >= 2015, and BM_NO_CXX11 not requested).
// BMNOEXCEPT2 is left undefined here for Emscripten; the WebASM section
// below supplies it.
// NOTE(review): the original had mis-nested #if/#else/#endif directives
// (extraction damage); the nesting is reconstructed to the evident intent.
//
#if defined(BM_NO_CXX11) || (defined(_MSC_VER) && _MSC_VER < 1900)
# define BMNOEXCEPT
# define BMNOEXCEPT2
#else
# ifndef BMNOEXCEPT
#  define BMNOEXCEPT noexcept
#  if !defined(__EMSCRIPTEN__)
#   define BMNOEXCEPT2
#  endif
# endif
#endif

// WebASM compilation settings
//
#if defined(__EMSCRIPTEN__)

#undef BM_x86

// EMSCRIPTEN specific tweaks
// WebAssembly compiles into a 32-bit memory system but offers 64-bit wordsize
// WebASM also benefits from GCC extensions (builtins like popcnt, lzcnt)
//
// BMNOEXCEPT2 is to declare "noexcept" for WebAsm only where needed
// and silence GCC warnings
//
# define BM64OPT
# define BM_USE_GCC_BUILD
  // NOTE(review): guarded to avoid a conflicting redefinition when the
  // C++11 section already defined BMNOEXCEPT2 (e.g. under BM_NO_CXX11).
# ifndef BMNOEXCEPT2
#  define BMNOEXCEPT2 noexcept
# endif

#else
# ifndef BMNOEXCEPT2
#  define BMNOEXCEPT2
# endif
#endif

// Enable MSVC 8.0 (2005) specific optimization options
// NOTE(review): the original tested _MSC_VER without defined(), which
// evaluates an undefined macro on other compilers (-Wundef noise).
//
#if defined(_MSC_VER) && (_MSC_VER >= 1400)
# define BM_HASFORCEINLINE
# ifndef BMRESTRICT
#  define BMRESTRICT __restrict
# endif
#endif

// GNU g++: use __restrict__ and drop internal asserts in optimized builds.
#ifdef __GNUG__
# ifndef BMRESTRICT
#  define BMRESTRICT __restrict__
# endif
  // optimized g++ build implies release mode: disable BM_ASSERT below
# ifdef __OPTIMIZE__
#  define BM_NOASSERT
# endif
#endif

// Release builds (NDEBUG) disable BitMagic's internal asserts.
#ifdef NDEBUG
# define BM_NOASSERT
#endif

// Default BM_ASSERT: maps to <cassert>'s assert unless asserts are
// disabled, in which case it expands to nothing.
// NOTE(review): the original nested a second "#ifndef BM_ASSERT" inside an
// identical outer guard; the redundant inner guard has been removed.
#ifndef BM_ASSERT
# ifndef BM_NOASSERT
#  include <cassert>
#  define BM_ASSERT assert
# else
#  define BM_ASSERT(x)
# endif
#endif

// Enable 64-bit optimizations on LP64/LLP64 targets.
// NOTE(review): __WORDSIZE is now tested with defined() first; the original
// evaluated it bare (undefined on MSVC and others, -Wundef noise).
#if defined(__x86_64) || defined(_M_AMD64) || defined(_WIN64) || \
    defined(__LP64__) || defined(_LP64) || \
    (defined(__WORDSIZE) && (__WORDSIZE == 64))
#ifndef BM64OPT
# define BM64OPT
#endif
#endif

// Shortcuts for the shared all-set block singletons (declared elsewhere in
// the library as bm::all_set<true>).
// NOTE(review): macro arguments are now parenthesized (BLOCK_ADDR_SAN and
// IS_EMPTY_BLOCK previously expanded "addr" bare, which mis-binds when the
// argument is a non-trivial expression).
#define FULL_BLOCK_REAL_ADDR bm::all_set<true>::_block._p
#define FULL_BLOCK_FAKE_ADDR bm::all_set<true>::_block._p_fullp
#define FULL_SUB_BLOCK_REAL_ADDR bm::all_set<true>::_block._s
#define BLOCK_ADDR_SAN(addr) (((addr) == FULL_BLOCK_FAKE_ADDR) ? FULL_BLOCK_REAL_ADDR : (addr))
#define IS_VALID_ADDR(addr) bm::all_set<true>::is_valid_block_addr(addr)
#define IS_FULL_BLOCK(addr) bm::all_set<true>::is_full_block(addr)
#define IS_EMPTY_BLOCK(addr) bool((addr) == 0)

#define BM_BLOCK_TYPE(addr) bm::all_set<true>::block_type(addr)

// Macro definitions to manipulate bits in pointers
// This trick is based on the fact that pointers allocated by malloc are
// aligned and bit 0 is never set. It means we are safe to use it.
// BM library keeps GAP flag in pointer.
//
// NOTE(review): the "ptr" argument is now parenthesized inside the casts;
// previously "(bm::id64_t)ptr" would bind the cast to only the first token
// of a compound argument expression.

// TODO: consider UINTPTR_MAX == 0xFFFFFFFF
//
# if ULONG_MAX != 0xffffffff || defined(_WIN64) // 64-bit

#  define BMPTR_SETBIT0(ptr)   ( ((bm::id64_t)(ptr)) | 1 )
#  define BMPTR_CLEARBIT0(ptr) ( ((bm::id64_t)(ptr)) & ~(bm::id64_t)1 )
#  define BMPTR_TESTBIT0(ptr)  ( ((bm::id64_t)(ptr)) & 1 )

# else // 32-bit

#  define BMPTR_SETBIT0(ptr)   ( ((bm::id_t)(ptr)) | 1 )
#  define BMPTR_CLEARBIT0(ptr) ( ((bm::id_t)(ptr)) & ~(bm::id_t)1 )
#  define BMPTR_TESTBIT0(ptr)  ( ((bm::id_t)(ptr)) & 1 )

# endif

# define BMGAP_PTR(ptr)    ((bm::gap_word_t*)BMPTR_CLEARBIT0(ptr))
# define BMSET_PTRGAP(ptr) (ptr) = (bm::word_t*)BMPTR_SETBIT0(ptr)
# define BM_IS_GAP(ptr)    (BMPTR_TESTBIT0(ptr))

// Fallback for BMRESTRICT: use "restrict" when the platform detection above
// reported support (BM_HASRESTRICT), otherwise expand to nothing.
#ifndef BMRESTRICT
# ifdef BM_HASRESTRICT
#  define BMRESTRICT restrict
# else
#  define BMRESTRICT
# endif
#endif

// Fallback for BMFORCEINLINE: __forceinline when supported, plain inline
// otherwise.
// NOTE(review): the original repeated "#ifndef BMFORCEINLINE" inside an
// identical outer guard; the redundant inner guard has been removed.
#ifndef BMFORCEINLINE
# ifdef BM_HASFORCEINLINE
#  define BMFORCEINLINE __forceinline
# else
#  define BMFORCEINLINE inline
# endif
#endif

// --------------------------------
// SSE optimization macros
//
// SSE4.2 on a 64-bit target upgrades BM64OPT to the combined BM64_SSE4 mode
// and supersedes plain SSE2.
// NOTE(review): __WORDSIZE is now guarded with defined() (consistent with
// the BM64OPT detection above; bare use is -Wundef noise elsewhere).
#ifdef BMSSE42OPT
# if defined(BM64OPT) || defined(__x86_64) || defined(_M_AMD64) || defined(_WIN64) || \
     defined(__LP64__) || defined(_LP64) || (defined(__WORDSIZE) && (__WORDSIZE == 64))
#  undef BM64OPT
#  define BM64_SSE4
# endif
# undef BMSSE2OPT
#endif

// AVX2 on a 64-bit target selects BM64_AVX2 and supersedes all SSE modes.
#ifdef BMAVX2OPT
# if defined(BM64OPT) || defined(__x86_64) || defined(_M_AMD64) || defined(_WIN64) || \
     defined(__LP64__) || defined(_LP64)
#  undef BM64OPT
#  undef BM64_SSE4
#  define BM64_AVX2
# endif
# undef BMSSE2OPT
# undef BMSSE42OPT
#endif

// AVX-512 on a 64-bit target selects BM64_AVX512, superseding SSE and AVX2.
#ifdef BMAVX512OPT
# if defined(BM64OPT) || defined(__x86_64) || defined(_M_AMD64) || defined(_WIN64) || \
     defined(__LP64__) || defined(_LP64)
#  undef BM64OPT
#  undef BM64_SSE4
#  undef BM64_AVX2
#  define BM64_AVX512
# endif
# undef BMSSE2OPT
# undef BMSSE42OPT
#endif

// Default (no-op) MMX guard.
// NOTE(review): because this empty default is defined before the SIMD
// section below, the sse_empty_guard variant there can never be selected —
// confirm that is intentional (MMX guards are legacy).
# ifndef BM_SET_MMX_GUARD
#  define BM_SET_MMX_GUARD
# endif

// SIMD alignment helper macros.
// BM_ALIGNxx goes before the declarator (MSVC __declspec style);
// BM_ALIGNxxATTR goes after it (GCC/Clang __attribute__ style).
// Exactly one of the two expands to something for a given compiler.
//
#if (defined(BMSSE2OPT) || defined(BMSSE42OPT) || defined(BMAVX2OPT) || defined(BMAVX512OPT))

  // NOTE(review): BM_SET_MMX_GUARD is already defined (empty) earlier in
  // this file, so this sse_empty_guard branch appears to be dead code —
  // confirm intent.
# ifndef BM_SET_MMX_GUARD
#  define BM_SET_MMX_GUARD sse_empty_guard bm_mmx_guard_;
# endif

# ifdef _MSC_VER
#  ifndef BM_ALIGN16
#   define BM_ALIGN16 __declspec(align(16))
#   define BM_ALIGN16ATTR
#  endif
#  ifndef BM_ALIGN32
#   define BM_ALIGN32 __declspec(align(32))
#   define BM_ALIGN32ATTR
#  endif
#  ifndef BM_ALIGN64
#   define BM_ALIGN64 __declspec(align(64))
#   define BM_ALIGN64ATTR
#  endif
# else // GCC/Clang
#  ifndef BM_ALIGN16
#   define BM_ALIGN16
#   define BM_ALIGN16ATTR __attribute__((aligned(16)))
#  endif
#  ifndef BM_ALIGN32
#   define BM_ALIGN32
#   define BM_ALIGN32ATTR __attribute__((aligned(32)))
#  endif
#  ifndef BM_ALIGN64
#   define BM_ALIGN64
#   define BM_ALIGN64ATTR __attribute__((aligned(64)))
#  endif
# endif

#else // no SIMD: all alignment macros expand to nothing

# define BM_ALIGN16
# define BM_ALIGN16ATTR
# define BM_ALIGN32
# define BM_ALIGN32ATTR
# define BM_ALIGN64
# define BM_ALIGN64ATTR

#endif

// Pick the vector alignment matching the widest enabled SIMD extension:
// 16 bytes for SSE/WASM-SIMD/NEON, 32 for AVX2, 64 for AVX-512, none otherwise.
#if (defined(BMSSE2OPT) || defined(BMSSE42OPT) || defined(BMWASMSIMDOPT) || defined(BMNEONOPT))
# define BM_VECT_ALIGN BM_ALIGN16
# define BM_VECT_ALIGN_ATTR BM_ALIGN16ATTR
#elif defined(BMAVX2OPT)
# define BM_VECT_ALIGN BM_ALIGN32
# define BM_VECT_ALIGN_ATTR BM_ALIGN32ATTR
#elif defined(BMAVX512OPT)
# define BM_VECT_ALIGN BM_ALIGN64
# define BM_VECT_ALIGN_ATTR BM_ALIGN64ATTR
#else
# define BM_VECT_ALIGN
# define BM_VECT_ALIGN_ATTR
#endif

// throw redefinition for compatibility with language wrappers
// (default is a no-op; wrappers may pre-define it to raise their own errors)
//
#ifndef BM_ASSERT_THROW
# define BM_ASSERT_THROW(x, xerrcode)
#endif

// Portable fallthrough annotation for switch statements.
// Feature-test macros default to 0 when the compiler does not provide them;
// the clang guard avoids [[fallthrough]] on old clang / pre-C++17 modes.
#ifndef __has_cpp_attribute
# define __has_cpp_attribute(x) 0
#endif
#ifndef __has_attribute
# define __has_attribute(x) 0
#endif

#if __has_cpp_attribute(fallthrough) && \
    (!defined(__clang__) || (__clang_major__ > 7 && __cplusplus >= 201703L))
# define BM_FALLTHROUGH [[fallthrough]]
#elif __has_cpp_attribute(gcc::fallthrough)
# define BM_FALLTHROUGH [[gcc::fallthrough]]
#elif __has_cpp_attribute(clang::fallthrough)
# define BM_FALLTHROUGH [[clang::fallthrough]]
#elif __has_attribute(fallthrough)
# define BM_FALLTHROUGH __attribute__ ((fallthrough))
#else
# define BM_FALLTHROUGH
#endif

361
362
363