1#ifndef BM__H__INCLUDED__
2#define BM__H__INCLUDED__
30# include <initializer_list>
37#pragma warning( push )
38#pragma warning( disable : 4311 4312 4127)
47# define BM_DECLARE_TEMP_BLOCK(x) bm::bit_block_t x;
158 position_(ref.position_)
160 bv_.set(position_, ref.bv_.get_bit(position_));
165 return bv_.get_bit(position_);
170 bv_.set(position_, (
bool)ref);
176 bv_.set(position_, value);
182 return bool(*
this) == bool(ref);
188 bv_.set_bit_and(position_, value);
195 if (value != bv_.get_bit(position_))
197 bv_.set_bit(position_);
205 bv_.set(position_, value != bv_.get_bit(position_));
212 return !bv_.get_bit(position_);
218 return !bv_.get_bit(position_);
297 if (this->
bv_ != ib.bv_)
return false;
298 if (this->
position_ != ib.position_)
return false;
299 if (this->
block_ != ib.block_)
return false;
300 if (this->
block_type_ != ib.block_type_)
return false;
301 if (this->
block_idx_ != ib.block_idx_)
return false;
312 for (
unsigned i = 0; i < bd.
bit_.
cnt; ++i)
500 buf_ =
bvect_->blockman_.get_allocator().alloc_bit_block();
517 buf_ = iit.buf_; iit.buf_ = 0;
538 buf_ = ii.buf_; ii.buf_ = 0;
695 return this->
valid();
714 static bool decode_wave(block_descr_type* bdescr)
BMNOEXCEPT;
715 bool decode_bit_group(block_descr_type* bdescr)
BMNOEXCEPT;
716 bool decode_bit_group(block_descr_type* bdescr,
743 bit_count_ = this->
valid();
751 this->bit_count_ = 1;
758 this->bit_count_ += this->
valid();
766 this->bit_count_ += this->
valid();
811 : strat(s), glevel_len(glevels)
846 const Alloc& alloc = Alloc())
847 : blockman_(glevel_len, bv_size, alloc),
848 new_blocks_strat_(strat),
858 const Alloc& alloc = Alloc())
859 : blockman_(glevel_len, bv_size, alloc),
860 new_blocks_strat_(strat),
868 : blockman_(
bvect.blockman_),
869 new_blocks_strat_(
bvect.new_blocks_strat_),
878 : blockman_(
bvect.blockman_.glevel_len_,
bvect.blockman_.max_bits_,
bvect.blockman_.alloc_),
879 new_blocks_strat_(
bvect.new_blocks_strat_),
882 if (!
bvect.blockman_.is_init())
893 : blockman_(
bvect.blockman_.glevel_len_,
bvect.blockman_.max_bits_,
bvect.blockman_.alloc_),
894 new_blocks_strat_(
bvect.new_blocks_strat_),
897 if (!
bvect.blockman_.is_init())
900 blockman_.copy_to_arena(
bvect.blockman_);
902 blockman_.copy(
bvect.blockman_);
928 blockman_.move_from(
bvect.blockman_);
930 new_blocks_strat_ =
bvect.new_blocks_strat_;
938 new_blocks_strat_(
BM_BIT),
942 std::initializer_list<size_type>::const_iterator it_start = il.begin();
943 std::initializer_list<size_type>::const_iterator it_end = il.end();
944 for (; it_start < it_end; ++it_start)
1021 {
return blockman_.get_allocator(); }
1027 { blockman_.get_allocator().set_pool(pool_ptr); }
1032 {
return blockman_.get_allocator().get_pool(); }
1899 {
return new_blocks_strat_; }
1922 statistics* stat = 0);
2037 {
return blockman_; }
2045 {
return blockman_; }
2059 const size_type ids_size,
bool opt_flag);
2176 void keep_range_no_check(
size_type left,
2185 unsigned nbit_right,
2195 unsigned nbit_right,
2212template<class Alloc>
2223template<
class Alloc>
2234template<
class Alloc>
2245template<
class Alloc>
2257template<
typename Alloc>
2261 if (!blockman_.is_init())
2262 blockman_.init_tree();
2267template<
typename Alloc>
2272 blockman_.deinit_tree();
2278 blockman_.copy_to_arena(
bvect.blockman_);
2283 blockman_.copy(
bvect.blockman_);
2288 blockman_.copy_to_arena(
bvect.blockman_);
2292 blockman_.copy(
bvect.blockman_);
2304template<
typename Alloc>
2309 blockman_.move_from(
bvect.blockman_);
2310 size_ =
bvect.size_;
2311 new_blocks_strat_ =
bvect.new_blocks_strat_;
2317template<
class Alloc>
2321 if (!blockman_.is_init())
2327 keep_range_no_check(left, right);
2332template<
typename Alloc>
2339 if (!blockman_.is_init() && !
value)
2365template<
typename Alloc>
2368 if (!blockman_.is_init())
2371 word_t*** blk_root = blockman_.top_blocks_root();
2375 unsigned top_blocks = blockman_.top_block_size();
2376 for (
unsigned i = 0; i < top_blocks; ++i)
2385 blk_blk = blk_root[i];
2399 cnt += blockman_.block_bitcount(blk_blk[j]);
2401 cnt += blockman_.block_bitcount(blk_blk[j+1]);
2403 cnt += blockman_.block_bitcount(blk_blk[j+2]);
2405 cnt += blockman_.block_bitcount(blk_blk[j+3]);
2415template<
typename Alloc>
2418 word_t*** blk_root = blockman_.top_blocks_root();
2421 typename blocks_manager_type::block_any_func func(blockman_);
2427template<
typename Alloc>
2432 if (size_ == new_size)
return;
2433 if (size_ < new_size)
2435 if (!blockman_.is_init())
2436 blockman_.init_tree();
2438 blockman_.reserve(new_size);
2450template<
typename Alloc>
2459 if (found && last >= size_)
2465template<
typename Alloc>
2480 if (!blockman_.is_init())
2493 unsigned real_top_blocks = blockman_.find_real_top_blocks();
2494 unsigned max_top_blocks = blockman_.find_max_top_blocks();
2497 rs_idx->set_total(nb + 1);
2498 rs_idx->resize(nb + 1);
2499 rs_idx->resize_effective_super_blocks(real_top_blocks);
2503 BM_ASSERT(max_top_blocks <= blockman_.top_block_size());
2504 bm::word_t*** blk_root = blockman_.top_blocks_root();
2505 for (
unsigned i = 0; i < max_top_blocks; ++i)
2510 rs_idx->set_null_super_block(i);
2515 rs_idx->set_full_super_block(i);
2531 bcount[j] = 0; sub_count[j] = 0;
2534 unsigned local_first, local_second, local_third;
2556 if (is_set) aux0 |= 1;
2559 if (is_set) aux1 |= 1;
2578 unsigned cnt = local_first + local_second + local_third;
2579 BM_ASSERT(cnt == blockman_.block_bitcount(block));
2582 if (bv_blocks && cnt)
2585 BM_ASSERT(cnt >= local_first + local_second);
2586 sub_count[j] =
bm::id64_t(local_first | (local_second << 16)) |
2587 (aux0 << 32) | (aux1 << 48);
2591 rs_idx->register_super_block(i, &bcount[0], &sub_count[0]);
2600template<
typename Alloc>
2604 bm::word_t*** blk_root = blockman_.top_blocks_root();
2607 typename blocks_manager_type::block_count_arr_func func(blockman_, &(arr[0]));
2609 return func.last_block();
2614#define BM_BORDER_TEST(blk, idx) bool(blk[idx >> bm::set_word_shift] & (1u << (idx & bm::set_word_mask)))
2617#if (defined(BMSSE42OPT) || defined(BMAVX2OPT) || defined(BMAVX512OPT) || \
2618 defined(BMWASMSIMDOPT) || defined(BMNEONOPT))
2621template<
typename Alloc>
2623bvector<Alloc>::block_count_to(
const bm::word_t* block,
2625 unsigned nbit_right,
2632 unsigned sub_cnt = unsigned(sub);
2633 unsigned first = sub_cnt & 0xFFFF;
2634 unsigned second = sub_cnt >> 16;
2641 unsigned cnt, aux0(
bm::gap_word_t(sub >> 32)); (void)cnt; (void)aux0;
2643 unsigned cnt1, aux1(
bm::gap_word_t(sub >> 48)); (void)aux1; (void) cnt1;
2648 #if defined(BM64_SSE4) || defined(BM64_AVX2) || defined(BM64_AVX512)
2651 const unsigned cutoff_bias = 0;
2654 unsigned sub_range = rs_idx.find_sub_range(nbit_right);
2664 c = bm::bit_block_calc_count_range<true, false>(block, 0, nbit_right);
2688 bm::bit_block_calc_count_range<true, false>(block,
2695 unsigned bc_second_range =
first + second;
2699 c = bc_second_range;
2713 c = bc_second_range - c;
2718 c = bm::bit_block_calc_count_range<true, false>(
2739 c = bm::bit_block_calc_count_range<true, false>(block,
2742 c +=
first + second;
2767 c = rs_idx.count(nb);
2787 c = bm::bit_block_calc_count_range<true, false>(block,
2809template<
typename Alloc>
2811bvector<Alloc>::block_count_to(
const bm::word_t* block,
2813 unsigned nbit_right,
2819 unsigned sub_cnt = unsigned(sub);
2820 unsigned first = sub_cnt & 0xFFFF;
2821 unsigned second = sub_cnt >> 16;
2828 unsigned cnt, aux0(
bm::gap_word_t(sub >> 32)); (void)cnt; (void)aux0;
2830 unsigned cnt1, aux1(
bm::gap_word_t(sub >> 48)); (void)aux1; (void) cnt1;
2835 unsigned sub_choice =
2841 c = bm::bit_block_calc_count_range<true, false>(block, 0, nbit_right);
2851 c = bm::bit_block_calc_count_range<true, false>(block,
2861 c =
first + second - c;
2864 c = bm::bit_block_calc_count_range<true, false>(
2872 c = bm::bit_block_calc_count_range<true, false>(block,
2875 c +=
first + second;
2888 c = rs_idx.count(nb);
2894 c = rs_idx.count(nb) - c;
2897 c = bm::bit_block_calc_count_range<true, false>(block,
2911#undef BM_BORDER_TEST
2915template<
typename Alloc>
2919 unsigned nbit_right,
2951 unsigned sub_cnt = unsigned(sub);
2953 unsigned second = (sub_cnt >> 16);
2962 unsigned sub_range = rs_idx.find_sub_range(nbit_right);
3008 c =
first + second - c;
3020 c +=
first + second;
3027 c = rs_idx.count(nb);
3032 c = bm::gap_bit_count_range<bm::gap_word_t, true> (gap_block,
3051template<
typename Alloc>
3057 if (!blockman_.is_init())
3065 if (nb_right >= rs_idx.get_total())
3067 cnt = rs_idx.count();
3070 cnt = nb_right ? rs_idx.rcount(nb_right-1) : 0;
3074 const bm::word_t* block = blockman_.get_block_ptr(i, j);
3083 c = this->gap_count_to(gap_block, nb_right, nbit_right, rs_idx);
3089 return cnt + nbit_right + 1;
3093 c = this->block_count_to(block, nb_right, nbit_right, rs_idx);
3104template<
typename Alloc>
3110 if (!blockman_.is_init())
3117 const bm::word_t* block = blockman_.get_block_ptr(i, j);
3126 cnt = gap_count_to(gap_blk, nb_right, nbit_right, rs_idx,
true);
3147 cnt = block_count_to(block, nb_right, nbit_right, rs_idx);
3154 cnt += nb_right ? rs_idx.rcount(nb_right - 1) : 0;
3160template<
typename Alloc>
3166 if (!blockman_.is_init())
3172 size_type cnt = nblock_right ? rs_idx.rcount(nblock_right - 1) : 0;
3176 const bm::word_t* block = blockman_.get_block_ptr(i, j);
3184 cnt += bm::gap_bit_count_to<bm::gap_word_t, true>(
3193 cnt += block_count_to(block, nblock_right, nbit_right, rs_idx);
3205template<
typename Alloc>
3210 if (!blockman_.is_init())
3221template<
typename Alloc>
3233 const bm::word_t* block = blockman_.get_block(i0, j0);
3243 typename blocks_manager_type::block_count_func func(blockman_);
3250 cnt += func.count();
3267 if (nblock_left == nblock_right)
3273 word_t*** blk_root = blockman_.top_blocks_root();
3277 nblock_left+1, nblock_right-1, func);
3278 cnt += func.count();
3282 block = blockman_.get_block(i0, j0);
3302template<
typename Alloc>
3306 if (!blockman_.is_init())
3323 const bm::word_t* block = blockman_.get_block(i0, j0);
3325 if (nblock_left == nblock_right)
3345 block = blockman_.get_block(i0, j0);
3355 if (nblock_left <= nblock_right)
3357 unsigned i_from, j_from, i_to, j_to;
3361 bm::word_t*** blk_root = blockman_.top_blocks_root();
3363 for (
unsigned i = i_from; i <= i_to; ++i)
3371 unsigned j = (i == i_from) ? j_from : 0;
3378 }
while (++j < j_limit);
3386template<
typename Alloc>
3391 if (!blockman_.is_init())
3406 const bm::word_t* block = blockman_.get_block(i0, j0);
3408 if (nblock_left == nblock_right)
3428 block = blockman_.get_block(i0, j0);
3438 if (nblock_left <= nblock_right)
3440 unsigned i_from, j_from, i_to, j_to;
3444 bm::word_t*** blk_root = blockman_.top_blocks_root();
3447 if (i_from >= top_size)
3449 if (i_to >= top_size)
3451 i_to = unsigned(top_size-1);
3456 for (
unsigned i = i_from; i <= i_to; ++i)
3464 unsigned j = (i == i_from) ? j_from : 0;
3471 }
while (++j < j_limit);
3479template<
typename Alloc>
3493template<
typename Alloc>
3501 return this->
test(left);
3504 cnt_l = this->
count_to(left-1, rs_idx);
3507 cnt_r = this->
count_to(right, rs_idx);
3509 return cnt_r - cnt_l;
3514template<
typename Alloc>
3523 bm::word_t*** blk_root = blockman_.top_blocks_root();
3524 for (
unsigned i = 0; i < top_blocks; ++i)
3545 blockman_.set_block_ptr(i, j, 0);
3566template<
typename Alloc>
3577 if (!blockman_.top_blocks_ || i >= blockman_.top_block_size_)
3580 const bm::word_t*
const* blk_blk = blockman_.top_blocks_[i];
3583 if (!blk_blk)
return false;
3599template<
typename Alloc>
3606 if (!blockman_.is_init())
3613 temp_block = blockman_.check_allocate_tempblock();
3622 blockman_.optimize_tree(temp_block, opt_mode, stat);
3625 blockman_.stat_correction(stat);
3626 stat->
memory_used += (unsigned)(
sizeof(*
this) -
sizeof(blockman_));
3630 blockman_.free_temp_block();
3635template<
typename Alloc>
3649 for (; nblock_left <= nblock_right; ++nblock_left)
3653 if (i0 >= blockman_.top_block_size())
3655 bm::word_t* block = blockman_.get_block_ptr(i0, j0);
3657 blockman_.optimize_block(i0, j0, block, temp_block, opt_mode, 0);
3664template<
typename Alloc>
3668 if (!blockman_.is_init())
3678 ::memcpy(opt_glen, st.gap_levels, bm::gap_levels * sizeof(*opt_glen));
3681 st.gap_length + st.gap_blocks,
3690template<typename Alloc>
3695 if (blockman_.is_init())
3697 word_t*** blk_root = blockman_.top_blocks_root();
3699 blocks_manager_type::gap_level_func gl_func(blockman_, glevel_len);
3703 blockman_.set_glen(glevel_len);
3708template<
typename Alloc>
3712 unsigned top_blocks = blockman_.top_block_size();
3713 unsigned bvect_top_blocks = bv.blockman_.top_block_size();
3715 if (bvect_top_blocks > top_blocks) top_blocks = bvect_top_blocks;
3717 for (
unsigned i = 0; i < top_blocks; ++i)
3719 const bm::word_t*
const* blk_blk = blockman_.get_topblock(i);
3720 const bm::word_t*
const* arg_blk_blk = bv.blockman_.get_topblock(i);
3722 if (blk_blk == arg_blk_blk)
3732 arg_blk = arg_blk_blk ? arg_blk_blk[j] : 0;
3740 blk = blk_blk ? blk_blk[j] : 0;
3744 if (blk == arg_blk)
continue;
3749 if (!blk || !arg_blk)
3826template<
typename Alloc>
3831 unsigned top_blocks = blockman_.top_block_size();
3832 bm::word_t*** top_root = blockman_.top_blocks_root();
3834 if (!top_blocks || !top_root)
3839 unsigned i_to, j_to;
3841 unsigned bvect_top_blocks =
bvect.blockman_.top_block_size();
3842 if (!bvect_top_blocks || !arg_top_root)
3844 bool f = this->
find(pos);
3847 if (pos > search_to)
3853 if (bvect_top_blocks > top_blocks)
3854 top_blocks = bvect_top_blocks;
3858 if (i_to < top_blocks)
3859 top_blocks = i_to+1;
3861 for (
unsigned i = 0; i < top_blocks; ++i)
3863 const bm::word_t*
const* blk_blk = blockman_.get_topblock(i);
3864 const bm::word_t*
const* arg_blk_blk =
bvect.blockman_.get_topblock(i);
3866 if (blk_blk == arg_blk_blk)
3910 if (pos > search_to)
3930template<
typename Alloc>
3935 blockman_.swap(
bvect.blockman_);
3942template<
typename Alloc>
3949 ::memcpy(st->gap_levels,
3952 st->max_serialize_mem = unsigned(
sizeof(
bm::id_t) * 4);
3953 unsigned top_size = blockman_.top_block_size();
3955 size_t blocks_mem =
sizeof(blockman_);
3958 blocks_mem +=
sizeof(
bm::word_t**) * top_size;
3959 bm::word_t*** blk_root = blockman_.top_blocks_root();
3963 for (
unsigned i = 0; i < top_size; ++i)
3965 const bm::word_t*
const* blk_blk = blk_root[i];
3972 blk_blk = blk_root[i];
3979 st->ptr_sub_blocks++;
3993 st->add_gap_block(cap, len, level);
3996 st->add_bit_block();
4001 size_t full_null_size = blockman_.calc_serialization_null_full();
4002 st->max_serialize_mem += full_null_size;
4006 size_t safe_inc = st->max_serialize_mem / 10;
4007 if (!safe_inc) safe_inc = 256;
4008 st->max_serialize_mem += safe_inc;
4011 st->memory_used += unsigned(
sizeof(*
this) -
sizeof(blockman_));
4013 st->memory_used += blocks_mem;
4015 st->memory_used +=
sizeof(
typename blocks_manager_type::arena);
4022template<
typename Alloc>
4027 unsigned top_size = blockman_.top_block_size();
4028 bm::word_t*** blk_root = blockman_.top_blocks_root();
4031 for (
unsigned i = 0; i < top_size; ++i)
4033 const bm::word_t*
const* blk_blk = blk_root[i];
4052template<
class Alloc>
4058 if (!ids || !ids_size)
4060 if (!blockman_.is_init())
4061 blockman_.init_tree();
4063 import(ids, ids_size, so);
4069template<
class Alloc>
4075 if (!ids || !ids_size || !blockman_.is_init())
4081 bv_tmp.
import(ids, ids_size, so);
4099template<
class Alloc>
4105 blockman_.destroy_arena();
4108 blockman_.set_all_zero(free_mem);
4113template<
class Alloc>
4119 if (!ids || !ids_size || !blockman_.is_init())
4124 bv_tmp.
import(ids, ids_size, so);
4141template<
class Alloc>
4152template<
class Alloc>
4163template<
class Alloc>
4166 if (val == condition)
return false;
4177template<
class Alloc>
4184 if (!blockman_.is_init())
4185 blockman_.init_tree();
4191template<
class Alloc>
4197 if (!blockman_.is_init())
4198 blockman_.init_tree();
4209template<
class Alloc>
4226 if (nblock == nblock_end)
4249 }
while (start < size_in);
4254template<
class Alloc>
4270 if (nblock == nblock_end)
4274 if (opt_flag && nbit == 65535)
4296 }
while (start < size_in);
4305 nblock_end += bool(nbit == 65535);
4311 }
while (nblock < nblock_end);
4320template<
class Alloc>
4329 blockman_.check_allocate_block(nblock, 1, 0, &block_type,
4335 if (stop-start == 1)
4341 blk = blockman_.deoptimize_block(nblock);
4355template<
class Alloc>
4366 blockman_.check_allocate_block(nblock,
4389 val = ~(*word & mask);
4395 val = ~(*word & mask);
4404template<
class Alloc>
4410 const bool val =
true;
4417 blockman_.check_allocate_block(nblock,
4432 blk[nword] |= (1u << nbit);
4438template<
class Alloc>
4444 const bool val =
false;
4449 blockman_.check_allocate_block(nblock,
4465 blk[nword] &= ~(1u << nbit);
4472template<
class Alloc>
4477 unsigned is_set, new_len, old_len;
4480 if (old_len < new_len)
4482 unsigned threshold =
bm::gap_limit(gap_blk, blockman_.glen());
4483 if (new_len > threshold)
4484 blockman_.extend_gap_block(nblock, gap_blk);
4491template<
class Alloc>
4495 unsigned new_len, old_len;
4498 if (old_len < new_len)
4500 unsigned threshold =
bm::gap_limit(gap_blk, blockman_.glen());
4501 if (new_len > threshold)
4502 blockman_.extend_gap_block(nblock, gap_blk);
4509template<
class Alloc>
4516 blockman_.check_allocate_block(nblock,
4536 is_set = ((*word) & mask);
4538 *word = (is_set) ? (*word & ~mask) : (*word | mask);
4545template<
class Alloc>
4554 blockman_.check_allocate_block(nblock,
4564 if (block_type == 1)
4567 unsigned is_set =
gap_block_set(gap_blk, val, nblock, nbit);
4577 bool is_set = ((*word) & mask) != 0;
4579 if (is_set != condition)
4597template<
class Alloc>
4606 blockman_.check_allocate_block(nblock,
4616 if (block_type == 1)
4621 bool new_val = val & old_val;
4622 if (new_val != old_val)
4624 unsigned is_set =
gap_block_set(gap_blk, val, nblock, nbit);
4636 bool is_set = ((*word) & mask) != 0;
4638 bool new_val = is_set & val;
4657template<
class Alloc>
4672template<
class Alloc>
4677 unsigned top_blocks = blockman_.top_block_size();
4680 for (
unsigned i = top_blocks-1;
true; --i)
4682 const bm::word_t*
const* blk_blk = blockman_.get_topblock(i);
4711 pos = base_idx + block_pos;
4729template<
class Alloc>
4733 if (!blockman_.is_init())
4738 return this->
test(from);
4745 const bm::word_t* block = blockman_.get_block_ptr(i0, j0);
4749 unsigned found_nbit;
4754 base_idx = bm::get_block_start<size_type>(i0, j0);
4755 pos = base_idx + found_nbit;
4766 const bm::word_t*
const* blk_blk = blockman_.get_topblock(i0);
4771 for (
unsigned j = j0;
true; --j)
4794 pos = base_idx + block_pos;
4807 for (
unsigned i = i0;
true; --i)
4809 blk_blk = blockman_.get_topblock(i);
4837 pos = base_idx + block_pos;
4854template<
class Alloc>
4859 unsigned top_blocks = blockman_.top_block_size();
4860 for (
unsigned i = 0; i < top_blocks; ++i)
4862 const bm::word_t*
const* blk_blk = blockman_.get_topblock(i);
4876 found =
true; block_pos = 0;
4888 pos = base_idx + block_pos;
4900template<
class Alloc>
4904 bool found =
find(in_first);
4913 in_first = in_last = 0;
4920template<
class Alloc>
4929 if (!rank_in || !blockman_.is_init())
4934 unsigned bit_pos = 0;
4939 const bm::word_t* block = blockman_.get_block(nb, &no_more_blocks);
4944 unsigned cnt = blockman_.block_bitcount(block);
4973template<
class Alloc>
4984 !blockman_.is_init() ||
4985 (rs_idx.count() < rank_in))
4993 nb = rs_idx.find(rank_in);
4994 BM_ASSERT(rs_idx.rcount(nb) >= rank_in);
4996 rank_in -= rs_idx.rcount(nb-1);
5000 unsigned bit_pos = 0;
5002 for (; nb < rs_idx.get_total(); ++nb)
5005 const bm::word_t* block = blockman_.get_block(nb, &no_more_blocks);
5010 unsigned block_bc = rs_idx.count(nb);
5011 if (rank_in <= block_bc)
5013 nbit = rs_idx.select_sub_range(nb, rank_in);
5019 rank_in -= block_bc;
5044template<
class Alloc>
5051 !blockman_.is_init() ||
5052 (rs_idx.count() < rank_in))
5057 bool found = rs_idx.find(&rank_in, &nb, &sub_range_from);
5063 const bm::word_t* block = blockman_.get_block_ptr(i, j);
5067 unsigned bit_pos = 0;
5071 bit_pos = sub_range_from + unsigned(rank_in) - 1;
5084template<
class Alloc>
5088 if (!blockman_.is_init())
5095 const bm::word_t* block = blockman_.get_block_ptr(i, j);
5125 for (; i < top_blocks; ++i)
5127 const bm::word_t*
const* blk_blk = blockman_.get_topblock(i);
5142 found =
true; block_pos = 0;
5154 prev = base_idx + block_pos;
5168template<
class Alloc>
5173 if (!blockman_.is_init())
5184template<
class Alloc>
5193template<
class Alloc>
5197 bool b = this->
test(0);
5204template<
class Alloc>
5212 if (!blockman_.is_init())
5230 bm::word_t* block = blockman_.get_block_ptr(i, j);
5238 goto insert_bit_check;
5246 unsigned new_block_len;
5249 unsigned threshold =
bm::gap_limit(gap_blk, blockman_.glen());
5250 if (new_block_len > threshold)
5251 blockman_.extend_gap_block(nb, gap_blk);
5259 block = blockman_.deoptimize_block(nb);
5280 unsigned top_blocks = blockman_.top_block_size();
5281 bm::word_t*** blk_root = blockman_.top_blocks_root();
5287 if (i >= top_blocks)
5294 blk_blk = blk_root[i];
5305 blockman_.check_allocate_block(nblock, 0, 0, &block_type,
false);
5306 block[0] |= carry_over;
5309 blk_root = blockman_.top_blocks_root();
5310 blk_blk = blk_root[i];
5311 top_blocks = blockman_.top_block_size();
5322 blk_blk = blockman_.check_alloc_top_subblock(i);
5336 carry_over = 0; block = 0;
5341 if (0 != (block = blk_blk[j]))
5356 block = blockman_.deoptimize_block(nblock);
5357 block[0] <<= (carry_over = 1);
5366 block = blockman_.deoptimize_block(nblock);
5370 unsigned new_block_len;
5374 unsigned threshold =
bm::gap_limit(gap_blk, blockman_.glen());
5375 if (new_block_len > threshold)
5376 blockman_.extend_gap_block(nblock, gap_blk);
5391 blockman_.zero_block(nblock);
5395 blockman_.zero_block(nblock);
5407template<
class Alloc>
5413 if (!blockman_.is_init())
5425 bm::word_t* block = blockman_.get_block_ptr(i, j);
5426 bool carry_over = test_first_block_bit(nb+1);
5431 block = blockman_.check_allocate_block(nb,
BM_BIT);
5438 block = blockman_.deoptimize_block(nb);
5450 unsigned top_blocks = blockman_.top_block_size();
5451 bm::word_t*** blk_root = blockman_.top_blocks_root();
5457 if (i >= top_blocks)
5460 blk_blk = blk_root[i];
5472 bool carry_over = 0;
5476 carry_over = this->
test(co_idx);
5480 blk_blk = blockman_.check_alloc_top_subblock(i);
5484 blk_blk = blockman_.check_alloc_top_subblock(i);
5492 bool carry_over = 0;
5497 bool no_blocks = (j == 0);
5500 if (0 != (block = blk_blk[j]))
5510 blockman_.zero_block(i, j-1);
5518 carry_over = test_first_block_bit(nblock+1);
5523 block = blockman_.deoptimize_block(nblock);
5531 carry_over = test_first_block_bit(nblock+1);
5532 unsigned new_block_len;
5536 unsigned threshold =
bm::gap_limit(gap_blk, blockman_.glen());
5537 if (new_block_len > threshold)
5538 blockman_.extend_gap_block(nblock, gap_blk);
5542 blockman_.zero_block(i, j);
5550 blockman_.zero_block(i, j);
5553 if (carry_over && nblock)
5566template<
class Alloc>
5577template<
class Alloc>
5582 if (!bv.blockman_.is_init())
5591 unsigned top_blocks = blockman_.top_block_size();
5592 if (size_ < bv.size_)
5596 unsigned arg_top_blocks = bv.blockman_.top_block_size();
5597 top_blocks = blockman_.reserve_top_blocks(arg_top_blocks);
5600 bm::word_t*** blk_root = blockman_.top_blocks_root();
5601 bm::word_t*** blk_root_arg = bv.blockman_.top_blocks_root();
5603 for (
unsigned i = 0; i < top_blocks; ++i)
5606 bm::word_t** blk_blk_arg = (i < arg_top_blocks) ? blk_root_arg[i] : 0;
5607 if (blk_blk == blk_blk_arg || !blk_blk_arg)
5609 if (!blk_blk && blk_blk_arg)
5613 blk_root[i] = blk_root_arg[i];
5614 blk_root_arg[i] = 0;
5621 blockman_.deallocate_top_subblock(i);
5631 blk = blk_blk[j]; arg_blk = blk_blk_arg[j];
5634 if (!blk && arg_blk)
5636 blockman_.set_block_ptr(i, j, arg_blk);
5637 bv.blockman_.set_block_ptr(i, j, 0);
5650template<
class Alloc>
5658 bm::word_t* blk = blockman_.get_block_ptr(i0, j0);
5666template<
class Alloc>
5674 if (blockman_.is_init())
5675 blockman_.deinit_tree();
5696 unsigned top_blocks1 = bman1.top_block_size();
5697 unsigned top_blocks2 = bman2.top_block_size();
5698 unsigned top_blocks = (top_blocks2 > top_blocks1) ? top_blocks2 : top_blocks1;
5699 top_blocks = blockman_.reserve_top_blocks(top_blocks);
5702 if (size_ < bv2.size_)
5705 bm::word_t*** blk_root_arg1 = bv1.blockman_.top_blocks_root();
5711 bm::word_t*** blk_root_arg2 = bv2.blockman_.top_blocks_root();
5718 for (
unsigned i = 0; i < top_blocks; ++i)
5720 bm::word_t** blk_blk_arg1 = (i < top_blocks1) ? blk_root_arg1[i] : 0;
5721 bm::word_t** blk_blk_arg2 = (i < top_blocks2) ? blk_root_arg2[i] : 0;
5723 if (blk_blk_arg1 == blk_blk_arg2)
5726 bm::word_t*** blk_root = blockman_.top_blocks_root();
5727 blk_root[i] = blk_blk_arg1;
5733 bm::word_t*** blk_root = blockman_.top_blocks_root();
5737 bm::word_t** blk_blk = blockman_.alloc_top_subblock(i);
5738 bool any_blocks =
false;
5742 const bm::word_t* arg_blk1 = blk_blk_arg1 ? blk_blk_arg1[j] : 0;
5743 const bm::word_t* arg_blk2 = blk_blk_arg2 ? blk_blk_arg2[j] : 0;
5744 if (arg_blk1 == arg_blk2 && !arg_blk1)
5748 blockman_.optimize_bit_block(i, j, opt_mode);
5749 any_blocks |= bool(blk_blk[j]);
5753 blockman_.free_top_subblock(i);
5758 blockman_.free_temp_block();
5765template<
class Alloc>
5773 if (blockman_.is_init())
5774 blockman_.deinit_tree();
5791 if (!bman1.is_init())
5797 if (!bman2.is_init())
5803 unsigned top_blocks1 = bman1.top_block_size();
5804 unsigned top_blocks2 = bman2.top_block_size();
5805 unsigned top_blocks = (top_blocks2 > top_blocks1) ? top_blocks2 : top_blocks1;
5806 top_blocks = blockman_.reserve_top_blocks(top_blocks);
5809 if (size_ < bv2.size_)
5812 bm::word_t*** blk_root_arg1 = bv1.blockman_.top_blocks_root();
5813 bm::word_t*** blk_root_arg2 = bv2.blockman_.top_blocks_root();
5815 for (
unsigned i = 0; i < top_blocks; ++i)
5817 bm::word_t** blk_blk_arg1 = (i < top_blocks1) ? blk_root_arg1[i] : 0;
5818 bm::word_t** blk_blk_arg2 = (i < top_blocks2) ? blk_root_arg2[i] : 0;
5820 if (blk_blk_arg1 == blk_blk_arg2)
5825 blockman_.deallocate_top_subblock(i);
5833 bm::word_t*** blk_root= blockman_.top_blocks_root();
5846 bm::word_t** blk_blk = blockman_.alloc_top_subblock(i);
5847 bool any_blocks =
false;
5852 arg_blk1 = blk_blk_arg1 ? blk_blk_arg1[j] : 0;
5853 arg_blk2 = blk_blk_arg2 ? blk_blk_arg2[j] : 0;
5855 if ((arg_blk1 == arg_blk2) &&
5861 blockman_.optimize_bit_block(i, j, opt_mode);
5862 any_blocks |= bool(blk_blk[j]);
5866 blockman_.free_top_subblock(i);
5871 blockman_.free_temp_block();
5878template<
class Alloc>
5901 if (blockman_.is_init())
5902 blockman_.deinit_tree();
5906 if (!bman1.is_init() || !bman2.is_init())
5909 unsigned top_blocks1 = bman1.top_block_size();
5910 unsigned top_blocks2 = bman2.top_block_size();
5911 unsigned top_blocks = (top_blocks2 > top_blocks1) ? top_blocks2 : top_blocks1;
5912 top_blocks = blockman_.reserve_top_blocks(top_blocks);
5915 if (size_ < bv2.size_)
5918 bm::word_t*** blk_root_arg1 = bv1.blockman_.top_blocks_root();
5919 bm::word_t*** blk_root_arg2 = bv2.blockman_.top_blocks_root();
5921 for (
unsigned i = 0; i < top_blocks; ++i)
5923 bm::word_t** blk_blk_arg1 = (i < top_blocks1) ? blk_root_arg1[i] : 0;
5924 bm::word_t** blk_blk_arg2 = (i < top_blocks2) ? blk_root_arg2[i] : 0;
5926 if (blk_blk_arg1 == blk_blk_arg2)
5932 bm::word_t*** blk_root = blockman_.top_blocks_root();
5942 bm::word_t** blk_blk = blockman_.alloc_top_subblock(i);
5943 bool any_blocks =
false;
5948 arg_blk1 = blk_blk_arg1 ? blk_blk_arg1[j] : 0;
5949 arg_blk2 = blk_blk_arg2 ? blk_blk_arg2[j] : 0;
5951 if ((arg_blk1 == arg_blk2) && !arg_blk1)
5956 blockman_.optimize_bit_block(i, j, opt_mode);
5957 any_blocks |= bool(blk_blk[j]);
5961 blockman_.free_top_subblock(i);
5966 blockman_.free_temp_block();
5973template<
class Alloc>
5999 if (!bman1.is_init() || !bman2.is_init())
6002 unsigned top_blocks1 = bman1.top_block_size();
6003 unsigned top_blocks2 = bman2.top_block_size();
6004 unsigned top_blocks = (top_blocks2 > top_blocks1) ? top_blocks2 : top_blocks1;
6005 top_blocks = blockman_.reserve_top_blocks(top_blocks);
6008 if (new_size < bv2.size_)
6009 new_size = bv2.size_;
6010 if (size_ < new_size)
6013 bm::word_t*** blk_root_arg1 = bv1.blockman_.top_blocks_root();
6014 bm::word_t*** blk_root_arg2 = bv2.blockman_.top_blocks_root();
6016 for (
unsigned i = 0; i < top_blocks; ++i)
6018 bm::word_t** blk_blk_arg1 = (i < top_blocks1) ? blk_root_arg1[i] : 0;
6019 bm::word_t** blk_blk_arg2 = (i < top_blocks2) ? blk_root_arg2[i] : 0;
6021 if (blk_blk_arg1 == blk_blk_arg2)
6027 bm::word_t*** blk_root = blockman_.top_blocks_root();
6029 blockman_.deallocate_top_subblock(i);
6040 bm::word_t** blk_blk = blockman_.check_alloc_top_subblock(i);
6041 bool any_blocks =
false;
6051 arg_blk1 = blk_blk_arg1 ? blk_blk_arg1[j] : 0;
6052 arg_blk2 = blk_blk_arg2 ? blk_blk_arg2[j] : 0;
6054 if ((arg_blk1 == arg_blk2) && !arg_blk1)
6069 blockman_.optimize_bit_block(i, j, opt_mode);
6071 any_blocks |= bool(blk_blk[j]);
6076 blockman_.free_top_subblock(i);
6081 blockman_.free_temp_block();
6090template<
class Alloc>
6098 if (blockman_.is_init())
6099 blockman_.deinit_tree();
6117 if (!bman1.is_init())
6121 if (!bman2.is_init())
6127 unsigned top_blocks1 = bman1.top_block_size();
6128 unsigned top_blocks2 = bman2.top_block_size();
6129 unsigned top_blocks = (top_blocks2 > top_blocks1) ? top_blocks2 : top_blocks1;
6130 top_blocks = blockman_.reserve_top_blocks(top_blocks);
6133 if (size_ < bv2.size_)
6136 bm::word_t*** blk_root_arg1 = bv1.blockman_.top_blocks_root();
6137 bm::word_t*** blk_root_arg2 = bv2.blockman_.top_blocks_root();
6139 for (
unsigned i = 0; i < top_blocks; ++i)
6141 bm::word_t** blk_blk_arg1 = (i < top_blocks1) ? blk_root_arg1[i] : 0;
6142 bm::word_t** blk_blk_arg2 = (i < top_blocks2) ? blk_root_arg2[i] : 0;
6144 if (blk_blk_arg1 == blk_blk_arg2)
6151 bm::word_t** blk_blk = blockman_.alloc_top_subblock(i);
6152 bool any_blocks =
false;
6156 const bm::word_t* arg_blk1 = blk_blk_arg1 ? blk_blk_arg1[j] : 0;
6157 const bm::word_t* arg_blk2 = blk_blk_arg2 ? blk_blk_arg2[j] : 0;
6158 if ((arg_blk1 == arg_blk2) && !arg_blk1)
6163 blockman_.optimize_bit_block(i, j, opt_mode);
6164 any_blocks |= bool(blk_blk[j]);
6168 blockman_.free_top_subblock(i);
6173 blockman_.free_temp_block();
6181#define BM_OR_OP(x) \
6183 blk = blk_blk[j+x]; arg_blk = blk_blk_arg[j+x]; \
6184 if (blk != arg_blk) \
6185 combine_operation_block_or(i, j+x, blk, arg_blk); \
6188template<
class Alloc>
6191 if (!bv.blockman_.is_init())
6194 unsigned top_blocks = blockman_.top_block_size();
6195 if (size_ < bv.size_)
6198 unsigned arg_top_blocks = bv.blockman_.top_block_size();
6199 top_blocks = blockman_.reserve_top_blocks(arg_top_blocks);
6201 bm::word_t*** blk_root = blockman_.top_blocks_root();
6202 bm::word_t*** blk_root_arg = bv.blockman_.top_blocks_root();
6204 for (
unsigned i = 0; i < top_blocks; ++i)
6207 bm::word_t** blk_blk_arg = (i < arg_top_blocks) ? blk_root_arg[i] : 0;
6208 if (blk_blk == blk_blk_arg || !blk_blk_arg)
6214 blockman_.deallocate_top_subblock(i);
6219 blk_blk = blockman_.alloc_top_subblock(i);
6226 #if defined(BM64_AVX2) || defined(BM64_AVX512)
6235 #elif defined(BM64_SSE4)
6254#define BM_XOR_OP(x) \
6256 blk = blk_blk[j+x]; arg_blk = blk_blk_arg[j+x]; \
6257 combine_operation_block_xor(i, j+x, blk, arg_blk); \
6260template<
class Alloc>
6263 if (!bv.blockman_.is_init())
6265 if (!blockman_.is_init())
6271 unsigned top_blocks = blockman_.top_block_size();
6272 if (size_ < bv.size_)
6276 unsigned arg_top_blocks = bv.blockman_.top_block_size();
6277 top_blocks = blockman_.reserve_top_blocks(arg_top_blocks);
6279 bm::word_t*** blk_root = blockman_.top_blocks_root();
6280 bm::word_t*** blk_root_arg = bv.blockman_.top_blocks_root();
6282 for (
unsigned i = 0; i < top_blocks; ++i)
6284 bm::word_t** blk_blk_arg = (i < arg_top_blocks) ? blk_root_arg[i] : 0;
6288 if (blk_blk == blk_blk_arg)
6298 blk_blk = blockman_.check_alloc_top_subblock(i);
6311 blk_blk = blockman_.alloc_top_subblock(i);
6318 #if defined(BM64_AVX2) || defined(BM64_AVX512)
6327 #elif defined(BM64_SSE4)
6347#define BM_AND_OP(x) if (0 != (blk = blk_blk[j+x])) \
6349 if (0 != (arg_blk = blk_blk_arg[j+x])) \
6351 combine_operation_block_and(i, j+x, blk, arg_blk); \
6352 if (opt_mode == opt_compress) \
6353 blockman_.optimize_bit_block(i, j+x, opt_mode); \
6356 blockman_.zero_block(i, j+x); \
6359template<
class Alloc>
6363 if (!blockman_.is_init())
6365 if (!bv.blockman_.is_init())
6371 unsigned top_blocks = blockman_.top_block_size();
6372 if (size_ < bv.size_)
6375 unsigned arg_top_blocks = bv.blockman_.top_block_size();
6376 top_blocks = blockman_.reserve_top_blocks(arg_top_blocks);
6379 bm::word_t*** blk_root = blockman_.top_blocks_root();
6380 bm::word_t*** blk_root_arg = bv.blockman_.top_blocks_root();
6382 for (
unsigned i = 0; i < top_blocks; ++i)
6387 bm::word_t** blk_blk_arg = (i < arg_top_blocks) ? blk_root_arg[i] : 0;
6397 blockman_.zero_block(i, j);
6398 blockman_.deallocate_top_subblock(i);
6406 blk_blk = blockman_.check_alloc_top_subblock(i);
6413 #if defined(BM64_AVX2) || defined(BM64_AVX512)
6422 #elif defined(BM64_SSE4)
6442#define BM_SUB_OP(x) \
6443 if ((0 != (blk = blk_blk[j+x])) && (0 != (arg_blk = blk_blk_arg[j+x]))) \
6444 combine_operation_block_sub(i, j+x, blk, arg_blk);
6447template<
class Alloc>
6450 if (!blockman_.is_init() || !bv.blockman_.is_init())
6453 unsigned top_blocks = blockman_.top_block_size();
6454 if (size_ < bv.size_)
6457 unsigned arg_top_blocks = bv.blockman_.top_block_size();
6458 top_blocks = blockman_.reserve_top_blocks(arg_top_blocks);
6460 bm::word_t*** blk_root = blockman_.top_blocks_root();
6461 bm::word_t*** blk_root_arg = bv.blockman_.top_blocks_root();
6463 for (
unsigned i = 0; i < top_blocks; ++i)
6466 bm::word_t** blk_blk_arg = (i < arg_top_blocks) ? blk_root_arg[i] : 0;
6467 if (!blk_blk || !blk_blk_arg)
6472 blockman_.deallocate_top_subblock(i);
6476 blk_blk = blockman_.check_alloc_top_subblock(i);
6483 #if defined(BM64_AVX2) || defined(BM64_AVX512)
6492 #elif defined(BM64_SSE4)
6511template<
class Alloc>
6516 if (!blockman_.is_init())
6520 blockman_.init_tree();
6523 unsigned top_blocks = blockman_.top_block_size();
6524 unsigned arg_top_blocks = bv.blockman_.top_block_size();
6526 if (arg_top_blocks > top_blocks)
6527 top_blocks = blockman_.reserve_top_blocks(arg_top_blocks);
6529 if (size_ < bv.size_)
6533 blockman_.reserve_top_blocks(arg_top_blocks);
6534 top_blocks = blockman_.top_block_size();
6537 if (size_ > bv.size_)
6542 if (arg_top_blocks < top_blocks)
6545 top_blocks = arg_top_blocks;
6550 bm::word_t*** blk_root = blockman_.top_blocks_root();
6551 unsigned block_idx = 0;
6555 top_blocks = blockman_.top_block_size();
6556 if (top_blocks < bv.blockman_.top_block_size())
6560 top_blocks = bv.blockman_.top_block_size();
6564 for (i = 0; i < top_blocks; ++i)
6574 const bm::word_t*
const* bvbb = bv.blockman_.get_topblock(i);
6584 const bm::word_t* arg_blk = bv.blockman_.get_block(i, j);
6602 const bm::word_t* arg_blk = bv.blockman_.get_block(i, j);
6609 blockman_.zero_block(i, j);
6620 const bm::word_t* arg_blk = bv.blockman_.get_block(i, j);
6633template<
class Alloc>
6642 blockman_.clone_assign_block(i, j, arg_blk2);
6647 blockman_.clone_assign_block(i, j, arg_blk1);
6659 if (is_gap1 | is_gap2)
6661 if (is_gap1 & is_gap2)
6667 blockman_.clone_gap_block(i, j, tmp_buf, res_len);
6675 arg_block = arg_blk2;
6680 arg_block = arg_blk1;
6683 bm::word_t* block = blockman_.clone_assign_block(i, j, arg_block);
6691 bm::word_t* block = blockman_.borrow_tempblock();
6692 blockman_.set_block_ptr(i, j, block);
6698 blockman_.return_tempblock(block);
6706template<
class Alloc>
6716 blockman_.clone_assign_block(i, j, arg_blk2);
6721 blockman_.clone_assign_block(i, j, arg_blk1);
6727 blockman_.clone_assign_block(i, j, arg_blk2,
true);
6733 blockman_.clone_assign_block(i, j, arg_blk1,
true);
6740 if (is_gap1 | is_gap2)
6742 if (is_gap1 & is_gap2)
6748 blockman_.clone_gap_block(i, j, tmp_buf, res_len);
6756 arg_block = arg_blk2;
6761 arg_block = arg_blk1;
6764 bm::word_t* block = blockman_.clone_assign_block(i, j, arg_block);
6772 bm::word_t* block = blockman_.borrow_tempblock();
6773 blockman_.set_block_ptr(i, j, block);
6778 blockman_.set_block_ptr(i, j, 0);
6779 blockman_.return_tempblock(block);
6788template<
class Alloc>
6796 if (!arg_blk1 || !arg_blk2)
6805 blockman_.clone_assign_block(i, j, arg_blk2);
6810 blockman_.clone_assign_block(i, j, arg_blk1);
6817 if (is_gap1 | is_gap2)
6819 if (is_gap1 & is_gap2)
6825 blockman_.clone_gap_block(i, j, tmp_buf, res_len);
6833 arg_block = arg_blk2;
6838 arg_block = arg_blk1;
6841 bm::word_t* block = blockman_.clone_assign_block(i, j, arg_block);
6847 blockman_.set_block_ptr(i, j, 0);
6848 blockman_.return_tempblock(block);
6857 bm::word_t* block = blockman_.borrow_tempblock();
6858 blockman_.set_block_ptr(i, j, block);
6863 blockman_.set_block_ptr(i, j, 0);
6864 blockman_.return_tempblock(block);
6873template<
class Alloc>
6880 bm::word_t* blk = blockman_.get_block_ptr(i, j);
6883 if (!arg_blk1 || !arg_blk2)
6888 blockman_.zero_block(i, j);
6903 blk = blockman_.deoptimize_block_no_check(blk, i, j);
6908 if (is_gap1 | is_gap2)
6910 if (is_gap1 & is_gap2)
6919 blockman_.return_tempblock(blk);
6930 arg_block = arg_blk2;
6935 arg_block = arg_blk1;
6938 bm::word_t* tmp_blk = blockman_.check_allocate_tempblock();
6945 blockman_.return_tempblock(blk);
6957 if (digest == digest_and)
6962 blockman_.return_tempblock(blk);
6973template<
class Alloc>
6985 blockman_.clone_assign_block(i, j, arg_blk1);
6996 if (is_gap1 | is_gap2)
6998 if (is_gap1 & is_gap2)
7004 blockman_.clone_gap_block(i, j, tmp_buf, res_len);
7010 bm::word_t* block = blockman_.borrow_tempblock();
7011 blockman_.set_block_ptr(i, j, block);
7016 blockman_.set_block_ptr(i, j, 0);
7017 blockman_.return_tempblock(block);
7023 bm::word_t* block = blockman_.clone_assign_block(i, j, arg_blk1);
7031 bm::word_t* block = blockman_.borrow_tempblock();
7032 blockman_.set_block_ptr(i, j, block);
7037 blockman_.set_block_ptr(i, j, 0);
7038 blockman_.return_tempblock(block);
7047template<
class Alloc>
7049 unsigned i,
unsigned j,
7059 blockman_.zero_block(i, j);
7073 blockman_.assign_gap_check(i, j, res, ++res_len, blk, tmp_buf);
7078 bm::word_t* new_blk = blockman_.get_allocator().alloc_bit_block();
7082 blockman_.get_allocator().free_gap_block(gap_blk, blockman_.glen());
7083 blockman_.set_block_ptr(i, j, new_blk);
7095 blk = blockman_.clone_gap_block(arg_gap, gap);
7096 blockman_.set_block(i, j, blk, gap);
7108 blk = blockman_.alloc_.alloc_bit_block();
7110 blockman_.set_block_ptr(i, j, blk);
7118 blockman_.get_allocator().free_bit_block(blk);
7126template<
class Alloc>
7128 unsigned i,
unsigned j,
7144 blockman_.set_block_ptr(i, j, 0);
7160 blk = blockman_.get_allocator().alloc_bit_block();
7162 blockman_.set_block_ptr(i, j, blk);
7178 blockman_.assign_gap_check(i, j, res, ++res_len, blk, tmp_buf);
7183 bm::word_t* new_blk = blockman_.get_allocator().alloc_bit_block();
7187 blockman_.get_allocator().free_gap_block(gap_blk, blockman_.glen());
7188 blockman_.set_block_ptr(i, j, new_blk);
7200 blk = blockman_.clone_gap_block(arg_gap, gap);
7201 blockman_.set_block(i, j, blk, gap);
7212 blk = blockman_.alloc_.alloc_bit_block();
7214 blockman_.set_block_ptr(i, j, blk);
7221 blockman_.get_allocator().free_bit_block(blk);
7222 blockman_.set_block_ptr(i, j, 0);
7230template<
class Alloc>
7253 blockman_.assign_gap_check(i, j, res, ++res_len, blk, tmp_buf);
7258 bm::word_t* new_blk = blockman_.get_allocator().alloc_bit_block();
7269 blockman_.get_allocator().free_bit_block(new_blk);
7276 blockman_.get_allocator().free_gap_block(gap_blk, blockman_.glen());
7277 blockman_.set_block_ptr(i, j, new_blk);
7288 blockman_.zero_block(i, j);
7296 bm::word_t* new_blk = blockman_.clone_gap_block(arg_gap, is_new_gap);
7300 blockman_.set_block_ptr(i, j, new_blk);
7308 blockman_.zero_block(i, j);
7318 bm::word_t* new_blk = blockman_.get_allocator().alloc_bit_block();
7320 blockman_.set_block_ptr(i, j, new_blk);
7327 blockman_.get_allocator().free_bit_block(blk);
7328 blockman_.set_block_ptr(i, j, 0);
7334template<
class Alloc>
7342 blockman_.zero_block(i, j);
7361 BM_ASSERT(!(res == tmp_buf && res_len == 0));
7363 blockman_.assign_gap_check(i, j, res, ++res_len, blk, tmp_buf);
7367 blk = blockman_.convert_gap2bitset(i, j, gap_blk);
7379 blockman_.zero_block(i, j);
7394 if (!dst || !arg_blk)
7398 if (ret && ret == arg_blk)
7400 ret = blockman_.get_allocator().alloc_bit_block();
7406 blockman_.set_block_ptr(i, j, ret);
7408 blockman_.get_allocator().free_bit_block(dst);
7414template<
class Alloc>
7429 if (!blk && arg_gap)
7431 blk = blockman_.clone_gap_block(
BMGAP_PTR(arg_blk), gap);
7432 blockman_.set_block(nb, blk, gap);
7451 BM_ASSERT(!(res == tmp_buf && res_len == 0));
7456 blockman_.zero_block(nb);
7458 blockman_.assign_gap(nb, res, ++res_len, blk, tmp_buf);
7471 blockman_.zero_block(nb);
7480 blk = blockman_.convert_gap2bitset(nb, gap_blk);
7517 if (dst == 0 && arg_blk == 0)
7529 ret = blockman_.get_allocator().alloc_bit_block();
7551 }
while (wrd_ptr < wrd_end);
7561 ret = blockman_.get_allocator().alloc_bit_block();
7568 if (ret && ret == arg_blk)
7570 ret = blockman_.get_allocator().alloc_bit_block();
7581 blockman_.set_block(nb, ret);
7584 blockman_.get_allocator().free_bit_block(dst);
7591template<
class Alloc>
7618 gap_init_range_block<gap_word_t>(tmp_gap_blk,
7623 block = blockman_.get_block_ptr(i, j);
7630 if (nblock_left == nblock_right)
7642 blockman_.set_all_set(nb, nb_to-1);
7645 if (nb_to > nblock_right)
7649 block = blockman_.get_block_ptr(i, j);
7651 gap_init_range_block<gap_word_t>(tmp_gap_blk,
7665template<
class Alloc>
7693 bm::gap_init_range_block<gap_word_t>(tmp_gap_blk,
7698 block = blockman_.get_block_ptr(i, j);
7706 if (nblock_left == nblock_right)
7708 nb = nblock_left + 1;
7718 blockman_.set_all_zero(nb, nb_to - 1u);
7721 if (nb_to > nblock_right)
7725 block = blockman_.get_block_ptr(i, j);
7726 gap_init_range_block<gap_word_t>(tmp_gap_blk,
7742template<
class Alloc>
7747 if (!
bvect.blockman_.is_init())
7753 if (blockman_.is_init())
7755 blockman_.deinit_tree();
7765template<
class Alloc>
7779 if (found && (last > right))
7787template<
class Alloc>
7799 blockman_.copy(
bvect.blockman_, nblock_left, nblock_right);
7811 if (found && (last > right))
7819template<
class Alloc>
7830template<
class Alloc>
7834 throw std::bad_alloc();
7836 BM_THROW(BM_ERR_BADALLOC);
7844template<
class Alloc>
7849 block_descr_type* bdescr = &(this->
bdescr_);
7860 unsigned val = *(++(bdescr->
gap_.
ptr));
7866 val = *(++(bdescr->
gap_.
ptr));
7874 unsigned short idx = ++(bdescr->
bit_.
idx);
7875 if (idx < bdescr->bit_.cnt)
7883 if (decode_bit_group(bdescr))
7887 if (search_in_blocks())
7897template<
class Alloc>
7903 return this->
valid();
7907 block_descr_type* bdescr = &(this->
bdescr_);
7913 unsigned short idx = ++(bdescr->
bit_.
idx);
7914 if (idx < bdescr->bit_.cnt)
7923 if (!decode_bit_group(bdescr,
rank))
7938 unsigned int val = *(++(bdescr->
gap_.
ptr));
7945 val = *(++(bdescr->
gap_.
ptr));
7956 if (!search_in_blocks())
7963 return this->
valid();
7970template<
class Alloc>
7976 return this->
valid();
7979 size_type new_pos = this->
bv_->check_or_next(pos);
7992 this->
bv_->get_blocks_manager();
7995 this->
block_ = bman.get_block(i0, j0);
8001 block_descr_type* bdescr = &(this->
bdescr_);
8007 search_in_gapblock();
8010 return this->
valid();
8018 bdescr->
gap_.
ptr = gptr + gpos;
8034 search_in_bitblock();
8035 return this->
valid();
8048 nbit += 32 * parity;
8049 for (
unsigned i = 0; i < bdescr->
bit_.
cnt; ++i)
8052 return this->
valid();
8057 return this->
valid();
8062template<
class Alloc>
8067 blocks_manager_type* bman = &(this->
bv_->blockman_);
8068 if (!bman->is_init())
8074 bm::word_t*** blk_root = bman->top_blocks_root();
8078 for (i = 0; i < bman->top_block_size(); ++i)
8093 this->
block_ = blk_blk[j];
8102 if (search_in_gapblock())
8110 if (search_in_bitblock())
8121template<
class Alloc>
8126 if (bdescr->bit_.cnt)
8128 bdescr->bit_.idx = 0;
8136template<
class Alloc>
8138bvector<Alloc>::enumerator::decode_bit_group(block_descr_type* bdescr)
BMNOEXCEPT
8141 for (; bdescr->bit_.ptr < block_end;)
8143 if (decode_wave(bdescr))
8146 this->
position_ += bdescr->bit_.bits[0];
8157template<
class Alloc>
8159bvector<Alloc>::enumerator::decode_bit_group(block_descr_type* bdescr,
8163 for (; bdescr->bit_.ptr < block_end;)
8166 #if defined(BMAVX512OPT) || defined(BMAVX2OPT) || defined(BM64OPT) || defined(BM64_SSE4)
8189 if (decode_wave(bdescr))
8192 this->
position_ += bdescr->bit_.bits[0];
8205template<
class Alloc>
8206bool bvector<Alloc>::enumerator::search_in_bitblock()
BMNOEXCEPT
8210 block_descr_type* bdescr = &(this->
bdescr_);
8211 bdescr->bit_.ptr = this->
block_;
8212 return decode_bit_group(bdescr);
8217template<
class Alloc>
8218bool bvector<Alloc>::enumerator::search_in_gapblock()
BMNOEXCEPT
8222 block_descr_type* bdescr = &(this->
bdescr_);
8224 unsigned bitval = *(bdescr->gap_.ptr) & 1;
8226 ++(bdescr->gap_.ptr);
8230 unsigned val = *(bdescr->gap_.ptr);
8234 if (bdescr->gap_.ptr ==
first)
8236 bdescr->gap_.gap_len = (
gap_word_t)(val + 1);
8240 bdescr->gap_.gap_len =
8249 ++(bdescr->gap_.ptr);
8256template<
class Alloc>
8257bool bvector<Alloc>::enumerator::search_in_blocks()
BMNOEXCEPT
8260 const blocks_manager_type& bman = this->
bv_->blockman_;
8262 block_idx_type top_block_size = bman.top_block_size();
8263 bm::word_t*** blk_root = bman.top_blocks_root();
8264 for (; i < top_block_size; ++i)
8272 for (++i; i < top_block_size; ++i)
8281 if ((i < top_block_size) && blk_root[i])
8292 this->
block_ = blk_blk[j];
8301 if (search_in_gapblock())
8308 if (search_in_bitblock())
8323#pragma warning( pop )
#define BM_BORDER_TEST(blk, idx)
#define BM_DECLARE_TEMP_BLOCK(x)
Default SIMD friendly allocator.
#define VECT_XOR_ARR_2_MASK(dst, src, src_end, mask)
Constants, lookup tables and typedefs.
#define IS_FULL_BLOCK(addr)
#define IS_VALID_ADDR(addr)
#define BMSET_PTRGAP(ptr)
#define BLOCK_ADDR_SAN(addr)
#define BM_ASSERT_THROW(x, xerrcode)
#define FULL_BLOCK_FAKE_ADDR
#define FULL_SUB_BLOCK_REAL_ADDR
#define FULL_BLOCK_REAL_ADDR
Bit manipulation primitives (internal)
SIMD target version definitions.
Algorithms for fast aggregation of a group of bit-vectors.
Output iterator designed to set "ON" bits based on an input sequence of integers.
bulk_insert_iterator(const insert_iterator &iit)
size_type * buf_
bulk insert buffer
bulk_insert_iterator & operator++()
bm::sort_order sorted_
sort order hint
bulk_insert_iterator(const bulk_insert_iterator &iit)
bulk_insert_iterator(bulk_insert_iterator &&iit) BMNOEXCEPT
static size_type buf_size_max() BMNOEXCEPT
bulk_insert_iterator & operator=(size_type n)
bvector_type::size_type size_type
bulk_insert_iterator & operator=(const bulk_insert_iterator &ii)
bulk_insert_iterator & operator*()
size_type buf_size_
current buffer size
std::output_iterator_tag iterator_category
bulk_insert_iterator & operator=(bulk_insert_iterator &&ii) BMNOEXCEPT
bvector_type * bvect_
target bvector
bvector_type * get_bvector() const BMNOEXCEPT
bulk_insert_iterator & operator++(int)
bulk_insert_iterator(bvector< Alloc > &bvect, bm::sort_order so=BM_UNKNOWN) BMNOEXCEPT
bvector_type::size_type value_type
bulk_insert_iterator() BMNOEXCEPT
bm::bvector< Alloc > bvector_type
Constant iterator designed to enumerate "ON" bits; counted_enumerator keeps bitcount,...
counted_enumerator & operator++() BMNOEXCEPT
counted_enumerator(const enumerator &en) BMNOEXCEPT
size_type count() const BMNOEXCEPT
Number of bits ON starting from the .
counted_enumerator() BMNOEXCEPT
counted_enumerator operator++(int)
std::input_iterator_tag iterator_category
counted_enumerator & operator=(const enumerator &en) BMNOEXCEPT
Constant iterator designed to enumerate "ON" bits.
std::input_iterator_tag iterator_category
enumerator(const bvector< Alloc > &bv, size_type pos=0) BMNOEXCEPT
Construct enumerator for bit vector.
void go_first() BMNOEXCEPT
Position enumerator to the first available bit.
size_type value() const BMNOEXCEPT
Get current position (value)
size_type operator*() const BMNOEXCEPT
Get current position (value)
enumerator operator++(int) BMNOEXCEPT
Advance enumerator forward to the next available bit. Possibly do NOT use this operator it is slower ...
bool go_to(size_type pos) BMNOEXCEPT
go to a specific position in the bit-vector (or next)
enumerator & operator++() BMNOEXCEPT
Advance enumerator forward to the next available bit.
enumerator(const bvector< Alloc > *bv) BMNOEXCEPT
Construct enumerator associated with a vector. Important: This construction creates unpositioned iter...
bool skip(size_type rank) BMNOEXCEPT
Skip specified number of bits from enumeration.
bool go_up() BMNOEXCEPT
Advance enumerator to the next available bit.
bool skip_to_rank(size_type rank) BMNOEXCEPT
Skip to specified relative rank.
bool advance() BMNOEXCEPT
enumerator(const bvector< Alloc > *bv, size_type pos) BMNOEXCEPT
Construct enumerator for bit vector.
Output iterator designed to set "ON" bits based on an input sequence of integers (bit indices).
insert_iterator(const insert_iterator &iit)
insert_iterator(bvector< Alloc > &bvect) BMNOEXCEPT
bvector_type * get_bvector() const
insert_iterator() BMNOEXCEPT
insert_iterator & operator++(int)
insert_iterator & operator=(const insert_iterator &ii)
insert_iterator & operator++()
insert_iterator & operator*()
bm::bvector< Alloc > bvector_type
insert_iterator & operator=(size_type n)
std::output_iterator_tag iterator_category
Base class for all iterators.
size_type position_
Bit position (bit idx)
iterator_base() BMNOEXCEPT
bool valid() const BMNOEXCEPT
Checks if iterator is still valid.
bool operator!=(const iterator_base &it) const BMNOEXCEPT
unsigned block_type_
Type of block. 0-Bit, 1-GAP.
bool compare_state(const iterator_base &ib) const BMNOEXCEPT
Compare FSMs for testing purposes.
union bm::bvector::iterator_base::block_descr bdescr_
void invalidate() BMNOEXCEPT
Turns iterator into an invalid state.
bool operator<(const iterator_base &it) const BMNOEXCEPT
bm::bvector< Alloc > * bv_
Pointer on parent bitvector.
bool operator==(const iterator_base &it) const BMNOEXCEPT
const bm::word_t * block_
Block pointer.(NULL-invalid)
bool operator>=(const iterator_base &it) const BMNOEXCEPT
bool operator>(const iterator_base &it) const BMNOEXCEPT
bool operator<=(const iterator_base &it) const BMNOEXCEPT
block_idx_type block_idx_
Block index.
Class reference implements an object for bit assignment.
bool operator==(const reference &ref) const BMNOEXCEPT
bool operator~() const BMNOEXCEPT
reference(const reference &ref) BMNOEXCEPT
bool operator!() const BMNOEXCEPT
const reference & operator|=(bool value) const
const reference & operator=(const reference &ref) const
const reference & operator=(bool value) const BMNOEXCEPT
const reference & operator&=(bool value) const
const reference & operator^=(bool value) const
reference(bvector< Alloc > &bv, size_type position) BMNOEXCEPT
Bitvector Bit-vector container with runtime compression of bits.
bm::bvector< Alloc > & bit_or(const bm::bvector< Alloc > &bv1, const bm::bvector< Alloc > &bv2, typename bm::bvector< Alloc >::optmode opt_mode=opt_none)
3-operand OR : this := bv1 OR bv2
void import_block(const size_type *ids, block_idx_type nblock, size_type start, size_type stop)
optmode
Optimization mode Every next level means additional checks (better compression vs time)
@ opt_compress
compress blocks when possible (GAP/prefix sum)
@ opt_free_01
Free unused 0 and 1 blocks.
@ opt_free_0
Free unused 0 blocks.
@ opt_none
no optimization
bool find(size_type &pos) const BMNOEXCEPT
Finds index of first 1 bit.
bvector(const bvector< Alloc > &bvect, bm::finalization is_final)
Copy-constructor for mutable/immutable initialization.
void operator&=(const bvector< Alloc > &bv)
blocks_manager_type & get_blocks_manager() BMNOEXCEPT
get access to memory manager (internal) Use only if you are BitMagic library
bool combine_operation_block_and_or(unsigned i, unsigned j, const bm::word_t *arg_blk1, const bm::word_t *arg_blk2)
void set_gap_levels(const gap_word_t *glevel_len)
Sets new GAP lengths table. All GAP blocks will be reallocated to match the new scheme.
bvector(strategy strat=BM_BIT, const gap_word_t *glevel_len=bm::gap_len_table< true >::_len, size_type bv_size=bm::id_max, const Alloc &alloc=Alloc())
Constructs bvector class.
bool test(size_type n) const BMNOEXCEPT
returns true if bit n is set and false is bit n is 0.
bvector(bvector< Alloc > &&bvect) BMNOEXCEPT
Move constructor.
void merge(bm::bvector< Alloc > &bvect)
Merge/move content from another vector.
bool equal(const bvector< Alloc > &bvect) const BMNOEXCEPT
Equal comparison with an agr bit-vector.
void clear_range_no_check(size_type left, size_type right)
Clear range without validity/bounds checking.
bvector< Alloc > & invert()
Invert/NEG all bits It should be noted, invert is affected by size() if size is set - it only inverts...
bool set_bit_and(size_type n, bool val=true)
Sets bit n using bit AND with the provided value.
allocator_type::allocator_pool_type allocator_pool_type
size_type size() const BMNOEXCEPT
Returns bvector's capacity (number of bits it can store)
size_type count() const BMNOEXCEPT
population count (count of ON bits)
void combine_operation_xor(const bm::bvector< Alloc > &bvect)
perform a set-algebra operation XOR
bool set_bit_conditional_impl(size_type n, bool val, bool condition)
bool clear_bit(size_type n)
Clears bit n.
bool operator[](size_type n) const BMNOEXCEPT
bool is_init() const BMNOEXCEPT
Return true if bvector is initialized at all.
bm::bvector< Alloc > & bit_and(const bm::bvector< Alloc > &bv1, const bm::bvector< Alloc > &bv2, typename bm::bvector< Alloc >::optmode opt_mode=opt_none)
3-operand AND : this := bv1 AND bv2
static void throw_bad_alloc()
size_type get_next(size_type prev) const BMNOEXCEPT
Finds the number of the next bit ON.
bool insert(size_type n, bool value)
Insert bit into specified position All the vector content after insert position is shifted right.
bool combine_operation_block_or(unsigned i, unsigned j, const bm::word_t *arg_blk1, const bm::word_t *arg_blk2)
bvector & operator=(const bvector< Alloc > &bvect)
Copy assignment operator.
const blocks_manager_type & get_blocks_manager() const BMNOEXCEPT
get access to memory manager (internal) Use only if you are BitMagic library
void sync_size()
Synchronize size if it got extended due to bulk import.
bool find_range(size_type &first, size_type &last) const BMNOEXCEPT
Finds dynamic range of bit-vector [first, last].
bvector< Alloc > operator~() const
size_type check_or_next_extract(size_type prev)
check if specified bit is 1, and set it to 0 if specified bit is 0, scan for the next 1 and returns i...
bool any_range(size_type left, size_type right) const BMNOEXCEPT
Returns true if any bits in the range are 1s (non-empty interval) Function uses closed interval [left...
void resize(size_type new_size)
Change size of the bvector.
size_type check_or_next(size_type prev) const BMNOEXCEPT
reference operator[](size_type n)
bm::bvector< Alloc > & bit_sub(const bm::bvector< Alloc > &bv1, const bm::bvector< Alloc > &bv2, typename bm::bvector< Alloc >::optmode opt_mode=opt_none)
3-operand SUB : this := bv1 MINUS bv2 SUBtraction is also known as AND NOT
void set_allocator_pool(allocator_pool_type *pool_ptr) BMNOEXCEPT
Set allocator pool for local (non-threaded) memory cyclic (lots of alloc-free ops) operations.
bvector(const bvector< Alloc > &bvect)
Copy constructor.
strategy get_new_blocks_strat() const BMNOEXCEPT
Returns blocks allocation strategy.
void optimize(bm::word_t *temp_block=0, optmode opt_mode=opt_compress, statistics *stat=0)
Optimize memory bitvector's memory allocation.
void forget_count() BMNOEXCEPT
size_type count_to_test(size_type n, const rs_index_type &rs_idx) const BMNOEXCEPT
popcount in [0..right] range if test(right) == true
void set_new_blocks_strat(strategy strat)
Sets new blocks allocation strategy.
size_type rank(size_type n, const rs_index_type &rs_idx) const BMNOEXCEPT
Returns rank of specified bit position (same as count_to())
bvector(std::initializer_list< size_type > il)
Brace constructor.
void fill_alloc_digest(bvector< Alloc > &bv_blocks) const
Calculate blocks digest vector (for diagnostics purposes) 1 is added if NB is a real,...
bool any() const BMNOEXCEPT
Returns true if any bits in this bitset are set, and otherwise returns false.
bool set_bit(size_type n, bool val=true)
Sets bit n.
insert_iterator inserter()
blocks_manager< Alloc > blocks_manager_type
bool empty() const BMNOEXCEPT
Returns true if the set is empty (no bits are set, otherwise returns false) Please note that this is ...
void keep_range(size_type left, size_type right)
Sets all bits to zero outside of the closed interval [left,right] Expected result: 00000....
void operator-=(const bvector< Alloc > &bv)
bool is_all_one_range(size_type left, size_type right) const BMNOEXCEPT
Returns true if all bits in the range are 1s (saturated interval) Function uses closed interval [left...
bool select(size_type rank, size_type &pos, const rs_index_type &rs_idx) const BMNOEXCEPT
select bit-vector position for the specified rank(bitcount)
Alloc get_allocator() const
bool and_bit_no_check(size_type n, bool val)
AND specified bit without checking preconditions (size, etc)
bvector_size_type size_type
bvector< Alloc > & flip(size_type n)
Flips bit n.
void clear_range(size_type left, size_type right)
Sets all bits to zero in the specified closed interval [left,right] Interval must be inside the bvect...
bool combine_operation_block_and(unsigned i, unsigned j, const bm::word_t *arg_blk1, const bm::word_t *arg_blk2)
void combine_operation_and(const bm::bvector< Alloc > &bvect, optmode opt_mode)
perform a set-algebra operation AND
bool find_first_mismatch(const bvector< Alloc > &bvect, size_type &pos, size_type search_to=bm::id_max) const BMNOEXCEPT
Find index of first bit different between this and the agr vector.
enumerator first() const
Returns enumerator pointing on the first non-zero bit.
bool combine_operation_block_sub(unsigned i, unsigned j, const bm::word_t *arg_blk1, const bm::word_t *arg_blk2)
bool combine_operation_block_xor(unsigned i, unsigned j, const bm::word_t *arg_blk1, const bm::word_t *arg_blk2)
enumerator end() const
Returns enumerator pointing on the next bit after the last.
bvector< Alloc > & set()
Sets every bit in this bitset to 1.
bool shift_left()
Shift left by 1 bit, fill with zero return carry out.
void swap(bvector< Alloc > &bvect) BMNOEXCEPT
Exchanges content of bv and this bvector.
size_type count_range(size_type left, size_type right, const rs_index_type &rs_idx) const BMNOEXCEPT
Returns count of 1 bits in the given range [left..right] Uses rank-select index to accelerate the sea...
bm::bvector< Alloc > & bit_and(const bm::bvector< Alloc > &bv, optmode opt_mode=opt_none)
2 operand logical AND
size_type get_first() const BMNOEXCEPT
find first 1 bit in vector. Function may return 0 and this requires an extra check if bit 0 is actual...
void freeze()
Turn current vector to read-only (immutable vector).
bool find_rank(size_type rank, size_type from, size_type &pos) const BMNOEXCEPT
Find bit-vector position for the specified rank(bitcount)
bvector & operator=(bvector< Alloc > &&bvect) BMNOEXCEPT
Move assignment operator.
void optimize_gap_size()
Optimize sizes of GAP blocks.
void init()
Explicit post-construction initialization. Must be called to make sure safe use of *_no_check() method...
bool shift_right()
Shift right by 1 bit, fill with zero return carry out.
bvector< Alloc > & flip()
Flips all bits.
bvector< Alloc > & set_range(size_type left, size_type right, bool value=true)
Sets all bits in the specified closed interval [left,right] Interval must be inside the bvector's siz...
void erase(size_type n)
Erase bit in the specified position All the vector content after erase position is shifted left.
size_type extract_next(size_type prev)
Finds the number of the next bit ON and sets it to 0.
void copy_range(const bvector< Alloc > &bvect, size_type left, size_type right)
Copy all bits in the specified closed interval [left,right].
bool is_ro() const BMNOEXCEPT
Returns true if vector is read-only.
enumerator get_enumerator(size_type pos) const
Returns enumerator pointing on specified or the next available bit.
size_type rank_corrected(size_type n, const rs_index_type &rs_idx) const BMNOEXCEPT
Returns rank corrected by the requested border value (as -1)
void import(const size_type *ids, size_type ids_size, bm::sort_order sorted_idx)
Import integers (set bits).
void copy_range_no_check(const bvector< Alloc > &bvect, size_type left, size_type right)
void combine_operation_or(const bm::bvector< Alloc > &bvect)
perform a set-algebra operation OR
blocks_manager_type::block_idx_type block_idx_type
void copy(const bvector< Alloc > &bvect, bm::finalization is_final)
Copy bvector from the argument bvector.
void operator^=(const bvector< Alloc > &bv)
void operator|=(const bvector< Alloc > &bv)
void clear(const size_type *ids, size_type ids_size, bm::sort_order so=bm::BM_UNKNOWN)
clear list of bits in this bitset
void import_sorted(const size_type *ids, const size_type ids_size, bool opt_flag)
Import sorted integers (set bits).
void optimize_range(size_type left, size_type right, bm::word_t *temp_block, optmode opt_mode=opt_compress)
block_idx_type count_blocks(unsigned *arr) const BMNOEXCEPT
Computes bitcount values for all bvector blocks.
rs_index< allocator_type > rs_index_type
size_type count_range_no_check(size_type left, size_type right) const BMNOEXCEPT
bvector(const bvector< Alloc > &bvect, size_type left, size_type right)
Copy constructor for range copy [left..right].
void combine_operation_sub(const bm::bvector< Alloc > &bvect)
perform a set-algebra operation MINUS (AND NOT)
int compare(const bvector< Alloc > &bvect) const BMNOEXCEPT
Lexicographical comparison with a bitvector.
size_type count_to(size_type n, const rs_index_type &rs_idx) const BMNOEXCEPT
Returns count of 1 bits (population) in [0..right] range.
rs_index< allocator_type > blocks_count
bool none() const BMNOEXCEPT
Returns true if no bits are set, otherwise returns false.
bool set_bit_conditional(size_type n, bool val, bool condition)
Sets bit n only if current value equals the condition.
bool find_reverse(size_type &pos) const BMNOEXCEPT
Finds last index of 1 bit.
allocator_pool_type * get_allocator_pool() BMNOEXCEPT
Get current allocator pool (if set)
void build_rs_index(rs_index_type *rs_idx, bvector< Alloc > *bv_blocks=0) const
compute running total of all blocks in bit vector (rank-select index)
void combine_operation_with_block(block_idx_type nb, const bm::word_t *arg_blk, bool arg_gap, bm::operation opcode)
bm::bvector< Alloc > & bit_xor(const bm::bvector< Alloc > &bv)
2 operand logical XOR
void clear_bit_no_check(size_type n)
Clears bit n without precondition checks.
void gap_block_set_no_ret(bm::gap_word_t *gap_blk, bool val, block_idx_type nblock, unsigned nbit)
set bit in GAP block with GAP block length control
bvector(size_type bv_size, strategy strat=BM_BIT, const gap_word_t *glevel_len=bm::gap_len_table< true >::_len, const Alloc &alloc=Alloc())
Constructs bvector class.
void combine_operation(const bm::bvector< Alloc > &bvect, bm::operation opcode)
perform a set-algebra operation by operation code
void keep(const size_type *ids, size_type ids_size, bm::sort_order so=bm::BM_UNKNOWN)
Keep list of bits in this bitset, others are cleared.
bm::bvector< Alloc > & bit_or_and(const bm::bvector< Alloc > &bv1, const bm::bvector< Alloc > &bv2, typename bm::bvector< Alloc >::optmode opt_mode=opt_none)
3-operand AND where result is ORed into the target vector : this |= bv1 AND bv2 TARGET := TARGET OR (...
bm::bvector< Alloc > & bit_sub(const bm::bvector< Alloc > &bv)
2 operand logical SUB(AND NOT). Also known as MINUS.
bvector< Alloc > & reset() BMNOEXCEPT
Clears every bit in the bitvector.
bm::bvector< Alloc > & bit_or(const bm::bvector< Alloc > &bv)
2 operand logical OR
bool get_bit(size_type n) const BMNOEXCEPT
returns true if bit n is set and false if bit n is 0.
bool gap_block_set(bm::gap_word_t *gap_blk, bool val, block_idx_type nblock, unsigned nbit)
set bit in GAP block with GAP block length control
void calc_stat(struct bm::bvector< Alloc >::statistics *st) const BMNOEXCEPT
Calculates bitvector statistics.
bm::bvector< Alloc > & bit_xor(const bm::bvector< Alloc > &bv1, const bm::bvector< Alloc > &bv2, typename bm::bvector< Alloc >::optmode opt_mode=opt_none)
3-operand XOR : this := bv1 XOR bv2
void set_range_no_check(size_type left, size_type right)
Set range without validity/bounds checking.
size_type recalc_count() BMNOEXCEPT
void move_from(bvector< Alloc > &bvect) BMNOEXCEPT
Move bvector content from another bvector.
bool inc(size_type n)
Increment the specified element.
void set_bit_no_check(size_type n)
Set bit without checking preconditions (size, etc)
Deserializer for bit-vector.
Deserializer, performs logical operations between bit-vector and serialized bit-vector.
Encoding utilities for serialization (internal)
BMFORCEINLINE bool avx2_test_all_zero_wave(const void *ptr)
check if wave of pointers is all NULL
BMFORCEINLINE bool avx2_test_all_zero_wave2(const void *ptr0, const void *ptr1)
check if 2 wave of pointers are all NULL
BMFORCEINLINE bool avx2_test_all_eq_wave2(const void *ptr0, const void *ptr1)
check if 2 wave of pointers are all the same (NULL or FULL)
BMFORCEINLINE bool sse42_test_all_eq_wave2(const void *ptr0, const void *ptr1) BMNOEXCEPT
check if wave of 2 pointers are the same (null or FULL)
BMFORCEINLINE bool sse42_test_all_zero_wave(const void *ptr) BMNOEXCEPT
check if wave of pointers is all NULL
BMFORCEINLINE bool sse42_test_all_zero_wave2(const void *ptr0, const void *ptr1) BMNOEXCEPT
check if 2 waves of pointers are all NULL
unsigned bit_block_find(const bm::word_t *BMRESTRICT block, unsigned nbit, unsigned *BMRESTRICT pos) BMNOEXCEPT
Searches for the next 1 bit in the BIT block.
bool bit_block_or_2way(bm::word_t *BMRESTRICT dst, const bm::word_t *BMRESTRICT src1, const bm::word_t *BMRESTRICT src2) BMNOEXCEPT
2 way (target := source1 | source2) bitblock OR operation.
BMFORCEINLINE bm::id_t word_bitcount(bm::id_t w) BMNOEXCEPT
void bit_block_copy(bm::word_t *BMRESTRICT dst, const bm::word_t *BMRESTRICT src) BMNOEXCEPT
Bitblock copy operation.
bm::word_t * bit_operation_sub(bm::word_t *BMRESTRICT dst, const bm::word_t *BMRESTRICT src) BMNOEXCEPT
bitblock SUB operation.
bm::word_t * bit_operation_xor(bm::word_t *BMRESTRICT dst, const bm::word_t *BMRESTRICT src) BMNOEXCEPT
bitblock XOR operation.
bm::id64_t bit_block_sub_2way(bm::word_t *BMRESTRICT dst, const bm::word_t *BMRESTRICT src1, const bm::word_t *BMRESTRICT src2, bm::id64_t digest) BMNOEXCEPT
digest based bitblock SUB (AND NOT) operation (3 operand)
bm::id64_t bit_block_and(bm::word_t *BMRESTRICT dst, const bm::word_t *BMRESTRICT src) BMNOEXCEPT
Plain bitblock AND operation. Function does not analyse availability of source and destination blocks...
bool bit_block_shift_r1_unr(bm::word_t *BMRESTRICT block, bm::word_t *BMRESTRICT empty_acc, bm::word_t co_flag) BMNOEXCEPT
Right bit-shift of bit-block by 1 bit (loop unrolled)
BMFORCEINLINE unsigned word_bitcount64(bm::id64_t x) BMNOEXCEPT
bm::word_t * bit_operation_or(bm::word_t *BMRESTRICT dst, const bm::word_t *BMRESTRICT src) BMNOEXCEPT
Block OR operation. Makes analysis if block is 0 or FULL.
bm::id_t bit_block_calc_count_to(const bm::word_t *block, bm::word_t right) BMNOEXCEPT
void bit_block_erase(bm::word_t *block, unsigned bitpos, bool carry_over) BMNOEXCEPT
erase bit from position and shift the rest right with carryover
bool bit_block_shift_l1_unr(bm::word_t *block, bm::word_t *empty_acc, bm::word_t co_flag) BMNOEXCEPT
Left bit-shift of bit-block by 1 bit (loop unrolled)
bm::id64_t calc_block_digest0(const bm::word_t *const block) BMNOEXCEPT
Compute digest for 64 non-zero areas.
bm::id64_t bit_block_and_2way(bm::word_t *BMRESTRICT dst, const bm::word_t *BMRESTRICT src1, const bm::word_t *BMRESTRICT src2, bm::id64_t digest) BMNOEXCEPT
digest based bit-block AND
unsigned short bitscan_wave(const bm::word_t *BMRESTRICT w_ptr, unsigned char *BMRESTRICT bits) BMNOEXCEPT
Unpacks word wave (Nx 32-bit words)
bool bit_find_first(const bm::word_t *BMRESTRICT block, unsigned *BMRESTRICT pos) BMNOEXCEPT
BIT block find the first set bit.
void bit_invert(T *start) BMNOEXCEPT
int bitcmp(const T *buf1, const T *buf2, unsigned len) BMNOEXCEPT
Lexicographical comparison of BIT buffers.
bm::id64_t bit_block_and_or_2way(bm::word_t *BMRESTRICT dst, const bm::word_t *BMRESTRICT src1, const bm::word_t *BMRESTRICT src2, bm::id64_t digest) BMNOEXCEPT
digest based bit-block AND - OR
bm::id64_t bit_block_xor(bm::word_t *BMRESTRICT dst, const bm::word_t *BMRESTRICT src) BMNOEXCEPT
Plain bitblock XOR operation. Function does not analyse availability of source and destination blocks...
void bit_block_set(bm::word_t *BMRESTRICT dst, bm::word_t value) BMNOEXCEPT
Bitblock memset operation.
bool bit_block_or(bm::word_t *BMRESTRICT dst, const bm::word_t *BMRESTRICT src) BMNOEXCEPT
Plain bitblock OR operation. Function does not analyse availability of source and destination blocks.
bool bit_is_all_zero(const bm::word_t *BMRESTRICT start) BMNOEXCEPT
Returns "true" if all bits in the block are 0.
unsigned bit_find_last(const bm::word_t *BMRESTRICT block, unsigned *BMRESTRICT last) BMNOEXCEPT
BIT block find the last set bit (backward search)
bm::id64_t update_block_digest0(const bm::word_t *const block, bm::id64_t digest) BMNOEXCEPT
Compute digest for 64 non-zero areas based on existing digest (function revalidates zero areas)
bm::id_t bit_block_calc_count_range(const bm::word_t *block, bm::word_t left, bm::word_t right) BMNOEXCEPT
void bit_andnot_arr_ffmask(bm::word_t *BMRESTRICT dst, const bm::word_t *BMRESTRICT src) BMNOEXCEPT
bitblock AND NOT with constant ~0 mask operation.
bm::word_t * bit_operation_and(bm::word_t *BMRESTRICT dst, const bm::word_t *BMRESTRICT src) BMNOEXCEPT
bitblock AND operation.
bm::word_t bit_block_insert(bm::word_t *BMRESTRICT block, unsigned bitpos, bool value) BMNOEXCEPT
insert bit into position and shift the rest right with carryover
bm::id64_t bit_block_sub(bm::word_t *BMRESTRICT dst, const bm::word_t *BMRESTRICT src) BMNOEXCEPT
Plain bitblock SUB (AND NOT) operation. Function does not analyse availability of source and destinat...
bm::id64_t bit_block_xor_2way(bm::word_t *BMRESTRICT dst, const bm::word_t *BMRESTRICT src1, const bm::word_t *BMRESTRICT src2) BMNOEXCEPT
2 way (target := source1 ^ source2) bitblock XOR operation.
bool is_bits_one(const bm::wordop_t *start) BMNOEXCEPT
Returns "true" if all bits in the block are 1.
sort_order
Sort order declaration.
bm::alloc_pool_guard< allocator_pool_type, bvector< Alloc > > mem_pool_guard
int(* bit_visitor_callback_type)(void *handle_ptr, bm::id_t bit_idx)
Callback type to visit (callback style) bits in bit-vector(s)
finalization
copy strategy
strategy
Block allocation strategies.
@ BM_SORTED
input set is sorted (ascending order)
@ BM_UNKNOWN
sort order unknown
@ READWRITE
mutable (read-write object)
@ READONLY
immutable (read-only object)
@ BM_BIT
No GAP compression strategy. All new blocks are bit blocks.
unsigned gap_test_unr(const T *BMRESTRICT buf, const unsigned pos) BMNOEXCEPT
Tests if bit = pos is true. Analog of bm::gap_test with SIMD unrolling.
unsigned gap_bit_count_range(const T *const buf, unsigned left, unsigned right) BMNOEXCEPT
Counts 1 bits in GAP buffer in the closed [left, right] range.
gap_word_t * gap_operation_or(const gap_word_t *BMRESTRICT vect1, const gap_word_t *BMRESTRICT vect2, gap_word_t *BMRESTRICT tmp_buf, unsigned &dsize) BMNOEXCEPT
GAP OR operation.
unsigned gap_find_last(const T *BMRESTRICT buf, unsigned *BMRESTRICT last) BMNOEXCEPT
GAP block find the last set bit.
void gap_and_to_bitset(unsigned *BMRESTRICT dest, const T *BMRESTRICT pcurr) BMNOEXCEPT
ANDs GAP block to bitblock.
unsigned gap_set_value(unsigned val, T *BMRESTRICT buf, unsigned pos, unsigned *BMRESTRICT is_set) BMNOEXCEPT
Sets or clears bit in the GAP buffer.
gap_word_t * gap_operation_xor(const gap_word_t *BMRESTRICT vect1, const gap_word_t *BMRESTRICT vect2, gap_word_t *BMRESTRICT tmp_buf, unsigned &dsize) BMNOEXCEPT
GAP XOR operation.
T gap_level(const T *BMRESTRICT buf) BMNOEXCEPT
Returns GAP block capacity level.
unsigned gap_bit_count_to(const T *const buf, T right) BMNOEXCEPT
Counts 1 bits in GAP buffer in the closed [0, right] range.
void gap_xor_to_bitset(unsigned *BMRESTRICT dest, const T *BMRESTRICT pcurr) BMNOEXCEPT
XOR GAP block to bitblock.
bool gap_shift_r1(T *BMRESTRICT buf, unsigned co_flag, unsigned *BMRESTRICT new_len) BMNOEXCEPT
Right shift GAP block by 1 bit.
int gapcmp(const T *buf1, const T *buf2) BMNOEXCEPT
Lexicographical comparison of GAP buffers.
void gap_convert_to_bitset(unsigned *BMRESTRICT dest, const T *BMRESTRICT buf, unsigned len=0) BMNOEXCEPT
GAP block to bitblock conversion.
unsigned gap_find_first(const T *BMRESTRICT buf, unsigned *BMRESTRICT first) BMNOEXCEPT
GAP block find the first set bit.
bool gap_insert(T *BMRESTRICT buf, unsigned pos, unsigned val, unsigned *BMRESTRICT new_len) BMNOEXCEPT
Insert bit into GAP compressed block.
void gap_invert(T *buf) BMNOEXCEPT
Inverts all bits in the GAP buffer.
void gap_sub_to_bitset(unsigned *BMRESTRICT dest, const T *BMRESTRICT pcurr) BMNOEXCEPT
SUB (AND NOT) GAP block to bitblock.
gap_word_t * gap_operation_and(const gap_word_t *BMRESTRICT vect1, const gap_word_t *BMRESTRICT vect2, gap_word_t *BMRESTRICT tmp_buf, unsigned &dsize) BMNOEXCEPT
GAP AND operation.
unsigned gap_block_find(const T *BMRESTRICT buf, unsigned nbit, bm::id_t *BMRESTRICT prev) BMNOEXCEPT
Searches for the next 1 bit in the GAP block.
gap_word_t * gap_operation_sub(const gap_word_t *BMRESTRICT vect1, const gap_word_t *BMRESTRICT vect2, gap_word_t *BMRESTRICT tmp_buf, unsigned &dsize) BMNOEXCEPT
GAP SUB (AND NOT) operation.
BMFORCEINLINE bool gap_is_all_zero(const bm::gap_word_t *BMRESTRICT buf) BMNOEXCEPT
Checks if GAP block is all-zero.
unsigned * gap_convert_to_bitset_smart(unsigned *BMRESTRICT dest, const T *BMRESTRICT buf, id_t set_max) BMNOEXCEPT
Smart GAP block to bitblock conversion.
bool gap_shift_l1(T *BMRESTRICT buf, unsigned co_flag, unsigned *BMRESTRICT new_len) BMNOEXCEPT
Left shift GAP block by 1 bit.
void gap_add_to_bitset(unsigned *BMRESTRICT dest, const T *BMRESTRICT pcurr, unsigned len) BMNOEXCEPT
Adds(OR) GAP block to bitblock.
unsigned gap_bit_count_range_hint(const T *const buf, unsigned left, unsigned right, unsigned hint) BMNOEXCEPT
Counts 1 bits in GAP buffer in the closed [left, right] range using position hint to avoid bfind.
bool improve_gap_levels(const T *length, const T *length_end, T *glevel_len) BMNOEXCEPT
Finds optimal gap blocks lengths.
unsigned gap_capacity(const T *BMRESTRICT buf, const T *BMRESTRICT glevel_len) BMNOEXCEPT
Returns GAP block capacity.
unsigned gap_limit(const T *BMRESTRICT buf, const T *BMRESTRICT glevel_len) BMNOEXCEPT
Returns GAP block capacity limit.
BMFORCEINLINE bm::gap_word_t gap_length(const bm::gap_word_t *BMRESTRICT buf) BMNOEXCEPT
Returns GAP block length.
const unsigned set_array_mask
bool block_any(const bm::word_t *const BMRESTRICT block) BMNOEXCEPT
Returns "true" if one bit is set in the block. Function checks for block varieties.
const id64_t all_bits_mask
void for_each_nzblock(T ***root, unsigned size1, F &f)
bool block_is_all_one_range(const bm::word_t *const BMRESTRICT block, unsigned left, unsigned right) BMNOEXCEPT
Returns "true" if all bits are 1 in the block [left, right]. Function checks for block varieties.
const unsigned set_block_mask
bool check_block_one(const bm::word_t *blk, bool deep_scan) BMNOEXCEPT
Checks if block has only 1 bits.
unsigned gap_bfind(const T *BMRESTRICT buf, unsigned pos, unsigned *BMRESTRICT is_set) BMNOEXCEPT
bvector< Alloc > operator-(const bvector< Alloc > &bv1, const bvector< Alloc > &bv2)
const unsigned set_sub_array_size
BMFORCEINLINE void get_block_coord(BI_TYPE nb, unsigned &i, unsigned &j) BMNOEXCEPT
Recalc linear bvector block index into 2D matrix coordinates.
const unsigned bits_in_array
const unsigned set_total_blocks
unsigned char get_nibble(const unsigned char *arr, unsigned idx) BMNOEXCEPT
get nibble from the array
gap_word_t *(* gap_operation_func_type)(const gap_word_t *BMRESTRICT, const gap_word_t *BMRESTRICT, gap_word_t *BMRESTRICT, unsigned &)
const unsigned set_block_size_op
bool block_find_first_diff(const bm::word_t *BMRESTRICT blk, const bm::word_t *BMRESTRICT arg_blk, unsigned *BMRESTRICT pos) BMNOEXCEPT
Find first bit which is different between two blocks (GAP or bit)
const unsigned rs3_half_span
const unsigned gap_levels
const unsigned set_word_shift
BMFORCEINLINE void xor_swap(W &x, W &y) BMNOEXCEPT
XOR swap two variables.
bm::id64_t idx_arr_block_lookup_u64(const bm::id64_t *idx, bm::id64_t size, bm::id64_t nb, bm::id64_t start) BMNOEXCEPT
block boundaries look ahead U64
const unsigned set_block_size
unsigned long long int id64_t
bool block_any_range(const bm::word_t *const BMRESTRICT block, unsigned left, unsigned right) BMNOEXCEPT
Returns "true" if one bit is set in the block [left, right]. Function checks for block varieties.
const unsigned gap_equiv_len
const unsigned rs3_border0_1
bvector< Alloc > operator|(const bvector< Alloc > &bv1, const bvector< Alloc > &bv2)
const unsigned rs3_border1
void set_block_bits_u32(bm::word_t *BMRESTRICT block, const unsigned *BMRESTRICT idx, unsigned start, unsigned stop) BMNOEXCEPT
set bits in a bit-block using global index
bvector< Alloc > operator^(const bvector< Alloc > &bv1, const bvector< Alloc > &bv2)
unsigned idx_arr_block_lookup_u32(const unsigned *idx, unsigned size, unsigned nb, unsigned start) BMNOEXCEPT
block boundaries look ahead U32
const unsigned short set_bitscan_wave_size
Size of bit decode wave in words.
const unsigned set_array_shift
unsigned short gap_word_t
const unsigned rs3_border1_1
void(* gap_operation_to_bitset_func_type)(unsigned *, const gap_word_t *)
bm::id_t bvector_size_type
const unsigned rs3_border0
const unsigned gap_max_bits
const unsigned set_top_array_size
const unsigned set_block_shift
void for_each_nzblock_range(T ***root, N top_size, N nb_from, N nb_to, F &f) BMNOEXCEPT
const unsigned set_word_mask
bool find_not_null_ptr(const bm::word_t *const *const *arr, N start, N size, N *pos) BMNOEXCEPT
const unsigned bits_in_block
bool block_find_reverse(const bm::word_t *BMRESTRICT block, unsigned nbit_from, unsigned *BMRESTRICT found_nbit) BMNOEXCEPT
Reverse find 1.
void set_block_bits_u64(bm::word_t *BMRESTRICT block, const bm::id64_t *BMRESTRICT idx, bm::id64_t start, bm::id64_t stop) BMNOEXCEPT
set bits in a bit-block using global index
bool for_each_nzblock_if(T ***root, BI size1, F &f) BMNOEXCEPT
SIZE_TYPE block_find_rank(const bm::word_t *const block, SIZE_TYPE rank, unsigned nbit_from, unsigned &nbit_pos) BMNOEXCEPT
Find rank in block (GAP or BIT)
Structure with statistical information about memory allocation footprint, serialization projection,...
gap_word_t gap_levels[bm::gap_levels]
GAP block lengths in the bvect.
size_t max_serialize_mem
estimated maximum memory for serialization
void reset() BMNOEXCEPT
Reset statistics.
size_t memory_used
memory usage for all blocks and service tables
allocation_policy(bm::strategy s=BM_BIT, const gap_word_t *glevels=bm::gap_len_table< true >::_len) BMNOEXCEPT
const gap_word_t * glevel_len
unsigned short idx
Current position in the bit list.
unsigned char bits[set_bitscan_wave_size *32]
bit list
size_type pos
Last bit position decoded before.
unsigned short cnt
Number of ON bits.
const bm::word_t * ptr
Word pointer.
Information about current DGAP block.
const gap_word_t * ptr
Word pointer.
gap_word_t gap_len
Current dgap length.
Statistical information about bitset's memory allocation details.
Default GAP lengths table.
static gap_operation_to_bitset_func_type gap_op_to_bit(unsigned i)
static gap_operation_func_type gap_operation(unsigned i)
dgap_descr gap_
DGAP block related info.
bitblock_descr bit_
BitBlock related info.