<class Alloc>
  44    template<typename TAlloc> friend class bvector;
 199    first_bit = block ? 1 : 0;
 224    first_bit = block[0] & 1;
 372    this->copy(blockman);
 403    bm.top_blocks_ = btmp;
 445    unsigned top_block_sz = (unsigned)
 546    const bm::word_t* ret = (blk_blk == 0) ? 0 : blk_blk[j];
 563    return (blk_blk) ? blk_blk[j] : 0;
 578    return (blk_blk) ? blk_blk[j] : 0;
 599    return bm->top_blocks_root();
 623    alloc_.free_bit_block(block);
 648    unsigned i, j, i_from, j_from, i_to, j_to;
 661    for (j = j_from; j <= j_to; ++j)
 681    for (j = 0; j <= j_to; ++j)
 689    for (i = i_from; i <= i_to; ++i)
 713    unsigned i, j, i_from, j_from, i_to, j_to;
 735    for (j = j_from; j <= j_to; ++j)
 755    for (j = 0; j <= j_to; ++j)
 762    for (i = i_from; i <= i_to; ++i)
 794    alloc_.free_bit_block(old_block);
 848    new_block = alloc_.alloc_bit_block();
 994    unsigned threshold = unsigned(this->glen(unsigned(level)) - 4u);
1002    if (res_len >= threshold)
1025    ::memcpy(tblk, tmp_buf, (res_len+1) * sizeof(gap_word_t));
1058    new_blk = alloc_.alloc_bit_block();
1077    unsigned content_flag,
1078    int initial_block_type,
1079    int* actual_block_type,
1080    bool allow_null_ret = true)
1091    *actual_block_type = initial_block_type;
1092    if (block_flag == content_flag && allow_null_ret)
1100    if (initial_block_type == 0)
1102    block = alloc_.alloc_bit_block();
1134    if (initial_block_type == 0)
1136    block = alloc_.alloc_bit_block();
1259    unsigned(level), this->glen());
1261    ::memcpy(gap_blk_ptr, gap_block_src,
1332    blk_blk[j+0] = blk_blk[j+1] = blk_blk[j+2] = blk_blk[j+3] = addr;
1392    if (gap_count < threashold)
1433    if (gap_count < threashold)
1456    if (gap_count < threashold)
1462    unsigned threashold)
1549    gap_block = gap_block ? gap_block : BMGAP_PTR(block);
1587    if (!block && alloc)
1603    unsigned i, unsigned j)
1611    alloc_.free_gap_block(gap_block, this->glen());
1667    alloc_.free_bit_block(block);
1739    bm::id_t cnt_sum(0), effective_blocks(0), gap_len_sum(0);
1742    const bm::word_t* blk = sub_stat.blocks[j] = blk_blk[j];
1747    sub_stat.full_blocks++;
1757    gap_len_sum += (gp[0] >> 3);
1758    sub_stat.gap_len_sum += (gp[0] >> 3);
1759    sub_stat.gap_blocks++;
1764    sub_stat.bc_arr[j] = bc;
1765    sub_stat.bit_blocks++;
1771    sub_stat.bv_count += bc;
1777    sub_stat.empty_blocks++; sub_stat.bc_arr[j] = 0;
1782    BM_ASSERT(sub_stat.bit_blocks + sub_stat.gap_blocks +
1783              sub_stat.empty_blocks +
1786    if (cnt_sum > non_sparse_cut_off)
1789    if (effective_blocks > 1)
1797    gap_len_sum += effective_blocks * 4;
1798    if (gap_len_sum < cnt_sum)
1800    unsigned diff = cnt_sum - gap_len_sum;
1801    float cut_off = cnt_sum * 0.14f;
1879    return alloc_.alloc_bit_block();
1891    alloc_.free_bit_block(block);
1914    *ptr = (gap_word_t)(((len-1) << 3) | (level << 1) | (*src & 1));
1991    if (i < top_blocks)
1992        ::memset(&new_blocks[i], 0, sizeof(void*) * (top_blocks - i));
2006    for ( ; tb_cnt; --tb_cnt)
2043    #if defined(__GNUG__)
2044    #if defined( __has_warning )
2045    #if __has_warning("-Wmaybe-uninitialized")
2046    #define BM_SUPPRESSING
2049    #define BM_SUPPRESSING
2051    #ifdef BM_SUPPRESSING
2052    #pragma GCC diagnostic push
2053    #pragma GCC diagnostic ignored "-Wmaybe-uninitialized"
2058    #ifdef BM_SUPPRESSING
2059    #pragma GCC diagnostic pop
2060    #undef BM_SUPPRESSING
2081    #define BM_FREE_OP(x) blk = blk_blk[j + x]; \
2082        if (IS_VALID_ADDR(blk)) \
2084            if (BM_IS_GAP(blk)) \
2085                alloc_.free_gap_block(BMGAP_PTR(blk), glen()); \
2087                alloc_.free_bit_block(blk); \
2103    #if defined(BM64_AVX2) || defined(BM64_AVX512)
2112    #elif defined(BM64_SSE4)
2140    for (unsigned i = 0; i < top_blocks; )
2177    for (unsigned i = 0; i < top_blocks; ++i)
2195    unsigned i = top_blocks - 1;
2196    for ( ; i > 0; --i)
2243    size_t s_size = sizeof(unsigned);
2249    for (unsigned i = 0; i < top_blocks; )
2254    s_size += nb_full ? 1 + sizeof(block_idx_type) : 0; nb_full = 0;
2257    unsigned nb_prev = i++;
2273    s_size += nb_empty ? 1 + sizeof(block_idx_type) : 0; nb_empty = 0;
2284    s_size += nb_full ? 1 + sizeof(block_idx_type) : 0; nb_full = 0;
2290    s_size += nb_empty ? 1 + sizeof(block_idx_type) : 0; nb_empty = 0;
2295    s_size += nb_empty ? 1 + sizeof(block_idx_type) : 0; nb_empty = 0;
2296    s_size += nb_full ? 1 + sizeof(block_idx_type) : 0; nb_full = 0;
2339    if (new_level >= 0 && new_level < old_level)
2346    gap_blk = new_gap_blk;
2363    if ((block[0] & 1u))
2390    if (gap_count < threashold)
2433    for (unsigned i = 0; i < top_blocks; ++i)
2483    stat->add_scorrection();
2486    size_t blocks_mem = sizeof(*this);
2487    blocks_mem += sizeof(bm::word_t**) * top_size;
2489    stat->memory_used += blocks_mem;
2510    for (unsigned i = 0; i < top_size; ++i)
2512    const bm::word_t* const* blk_blk = blk_root[i];
2519    blk_blk = blk_root[i];
2537    st->gap_blocks_sz += len;
2586    throw std::bad_alloc();
2588    BM_THROW(BM_ERR_BADALLOC);
2621    size_t alloc_sz = st.get_alloc_size();
2623    size_t alloc_sz_v = (alloc_sz + (sizeof(void*)-1)) / sizeof(void*);
2625    char* arena_mem_ptr = (char*) alloc.alloc_ptr(alloc_sz_v);
2626    ar->a_ptr_ = arena_mem_ptr;
2628    if (st.bit_blocks_sz)
2632    arena_mem_ptr += st.bit_blocks_sz * sizeof(bm::word_t);
2640    arena_mem_ptr += st.top_block_size * sizeof(void*);
2642    if (st.ptr_sub_blocks_sz)
2645    arena_mem_ptr += st.ptr_sub_blocks_sz * sizeof(void*);
2650    if (st.gap_blocks_sz)
2668    size_t alloc_sz = ar->st_.get_alloc_size();
2670    size_t alloc_sz_v = (alloc_sz + (sizeof(void*)-1)) / sizeof(void*);
2671    alloc.free_ptr(ar->a_ptr_, alloc_sz_v);
2710    for (unsigned i = 0; i < top_size; ++i)
2712    const bm::word_t* const* blk_blk_arg = blk_root_arg[i];
2719    blk_blk_arg = blk_root_arg[i];
2730    blk_root[i] = t_blk_blk;
2747    t_blk_blk[j] = blk_p;
2748    t_gap_block += len;
2756    t_blk_blk[j] = t_block;
2790    if (need_top_blocks < arg_top_blocks)
2791    arg_top_blocks = unsigned(need_top_blocks);
2798    unsigned i_from, j_from, i_to, j_to;
2802    if (i_to >= arg_top_blocks-1)
2804    i_to = arg_top_blocks-1;
2808    for (unsigned i = i_from; i <= i_to; ++i)
2818    unsigned j = (i == i_from) ? j_from : 0;
2821    blk_root[i] = blk_blk_arg;
2833    unsigned j = (i == i_from) ? j_from : 0;
2839    blk = blk_blk[j]; blk_arg = blk_blk_arg[j];
2856    blk = alloc_.alloc_bit_block();
2863    } while (j < j_limit);
2888    template<class BlocksManager>
2899    bman_.get_allocator().free_bit_block(block_, 3);
2911    attach(bman_.get_allocator().alloc_bit_block(3));
2929    template<typename POOL, typename PCLASS>
2939    obj.set_allocator_pool(&pool);
2944    optr_->set_allocator_pool(0);
2951    if (!obj.get_allocator_pool())
2955    optr_->set_allocator_pool(&pool);
2971    #pragma warning( pop )

static void * Alloc(size_t size)
#define IS_FULL_BLOCK(addr)
#define IS_VALID_ADDR(addr)
#define BMPTR_SETBIT0(ptr)
#define BMSET_PTRGAP(ptr)
#define BMPTR_CLEARBIT0(ptr)
#define FULL_BLOCK_FAKE_ADDR
#define FULL_SUB_BLOCK_REAL_ADDR
#define FULL_BLOCK_REAL_ADDR
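The macros above suggest a low-bit pointer-tagging scheme for block addresses: a GAP block pointer is marked in its least significant bit, and a special sentinel address stands for an all-ones block. The snippet below is an illustrative sketch of that idea only; the demo_ names and the sentinel value are assumptions, not the library's actual definitions.

// Illustrative sketch of low-bit pointer tagging; not BitMagic's real macros.
#include <cstdint>
#include <cassert>

using word_t = unsigned;

static word_t full_block_sentinel;                       // hypothetical stand-in
#define DEMO_FULL_BLOCK_FAKE_ADDR (&full_block_sentinel) // for FULL_BLOCK_FAKE_ADDR

// mark / unmark / test a pointer as "GAP" via its least significant bit
inline word_t* demo_set_gap(word_t* p)   { return (word_t*)((std::uintptr_t)p | 1u); }
inline word_t* demo_clear_gap(word_t* p) { return (word_t*)((std::uintptr_t)p & ~(std::uintptr_t)1u); }
inline bool    demo_is_gap(const word_t* p) { return ((std::uintptr_t)p & 1u) != 0; }

// "valid" here means: not NULL and not the all-ones sentinel
inline bool demo_is_valid_addr(const word_t* p)
{
    return p != 0 && p != DEMO_FULL_BLOCK_FAKE_ADDR;
}

int main()
{
    word_t block[4] = {0};
    word_t* tagged = demo_set_gap(block);

    assert(demo_is_gap(tagged));
    assert(demo_clear_gap(tagged) == block);
    assert(demo_is_valid_addr(block));
    assert(!demo_is_valid_addr(DEMO_FULL_BLOCK_FAKE_ADDR));
    return 0;
}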
void assign_if_not_set(POOL &pool, PCLASS &obj) BMNOEXCEPT
check if vector has no assigned allocator and set one
PCLASS * optr_
guarded object
void operator=(const alloc_pool_guard &)=delete
alloc_pool_guard(const alloc_pool_guard &)=delete
~alloc_pool_guard() BMNOEXCEPT
alloc_pool_guard() BMNOEXCEPT
alloc_pool_guard(POOL &pool, PCLASS &obj) BMNOEXCEPT
bm::word_t * get() BMNOEXCEPT
bit_block_guard & operator=(const bit_block_guard &)
bit_block_guard(const bit_block_guard &)
bit_block_guard(BlocksManager &bman, bm::word_t *blk=0) BMNOEXCEPT
void attach(bm::word_t *blk) BMNOEXCEPT
Functor that detects if any bit is set.
block_any_func(const blocks_manager &bm) BMNOEXCEPT
Bitcounting functor filling the block counts array.
void operator()(const bm::word_t *block, id_type idx) BMNOEXCEPT
void on_non_empty_top(unsigned) BMNOEXCEPT
id_type last_block() const BMNOEXCEPT
block_count_arr_func(const blocks_manager &bm, unsigned *arr) BMNOEXCEPT
Base class for bitcounting functors.
bm::id_t block_count(const bm::word_t *block) const BMNOEXCEPT
block_count_base(const blocks_manager &bm) BMNOEXCEPT
bit value change counting functor
block_count_change_func(const blocks_manager &bm) BMNOEXCEPT
id_type count() const BMNOEXCEPT
void operator()(const bm::word_t *block, block_idx_type idx) BMNOEXCEPT
bm::id_t prev_block_border_bit_
block_idx_type block_count(const bm::word_t *block, block_idx_type idx) BMNOEXCEPT
id_type count() const BMNOEXCEPT
void operator()(const bm::word_t *block) BMNOEXCEPT
void add_full(id_type c) BMNOEXCEPT
block_count_func(const blocks_manager &bm) BMNOEXCEPT
Fill block with all-one bits functor.
void operator()(bm::word_t *block, block_idx_type idx)
block_one_func(blocks_manager &bm) BMNOEXCEPT
Base functor class for "constant" (read-only) functors.
void on_empty_block(block_idx_type) BMNOEXCEPT
bm_func_base_const(const blocks_manager &bman) BMNOEXCEPT
bm_func_base_const & operator=(const bm_func_base_const &) BMNOEXCEPT
bm_func_base_const(const bm_func_base_const &) BMNOEXCEPT
void on_empty_top(unsigned) BMNOEXCEPT
const blocks_manager & bm_
Base functor class (block visitor)
void on_empty_top(unsigned) BMNOEXCEPT
bm_func_base(blocks_manager &bman) BMNOEXCEPT
bm_func_base(const bm_func_base &)
bm_func_base & operator=(const bm_func_base &)
void on_empty_block(block_idx_type) BMNOEXCEPT
const gap_word_t * glevel_len_
void on_non_empty_top(unsigned)
gap_level_func(blocks_manager &bm, const gap_word_t *glevel_len) BMNOEXCEPT
void operator()(bm::word_t *block, block_idx_type idx)
bitvector blocks manager: embedded class managing bit-blocks at a very low level. Includes a number of functor classes used in bitset algorithms.
unsigned top_block_size_
Size of the top level block array in blocks_ tree.
void set_glen(const gap_word_t *glevel_len) BMNOEXCEPT
void zero_block(unsigned i, unsigned j) BMNOEXCEPT
Free block, make it zero pointer in the tree.
void set_all_zero(block_idx_type nb, block_idx_type nb_to) BMNOEXCEPT
set all-zero block pointers for [start..end]
bm::word_t *** top_blocks_
Tree of blocks.
arena * arena_
memory arena pointer
void assign_gap(unsigned i, unsigned j, const bm::gap_word_t *res, unsigned res_len, bm::word_t *blk, gap_word_t *tmp_buf)
Attach the result of a GAP logical operation.
bm::word_t * get_block_ptr(unsigned i, unsigned j) BMNOEXCEPT
Finds block in 2-level blocks array (uninitialized)
void set_block_all_set(block_idx_type nb)
bm::word_t * check_allocate_tempblock()
unsigned find_max_top_blocks() const BMNOEXCEPT
calculate max top blocks size without NULL-tail
bm::word_t * deoptimize_block(block_idx_type nb)
Make sure block turns into true bit-block if it is GAP or a full block.
gap_word_t glevel_len_[bm::gap_levels]
vector defines gap block lengths for different levels
void return_tempblock(bm::word_t *block) BMNOEXCEPT
void set_all_set(block_idx_type nb, block_idx_type nb_to)
set all-set block pointers for [start..end]
const bm::word_t *const * get_topblock(unsigned i) const BMNOEXCEPT
Function returns top-level block in 2-level blocks array.
id_type max_bits_
maximum addressable bits
void validate_top_full(unsigned i) BMNOEXCEPT
void shrink_top_blocks()
shrink unused top blocks array (via reallocation)
allocator_type get_allocator() const BMNOEXCEPT
Returns allocator.
blocks_manager(const gap_word_t *glevel_len, id_type max_bits, const Alloc &alloc=Alloc())
void copy_to_arena(arena *ar) const BMNOEXCEPT
Copy blocks into arena allocated memory.
void init_tree(unsigned top_size)
allocate the first level of block descriptors
bm::word_t * alloc_bit_block(block_idx_type nb)
Create (allocate) bit block.
void destroy_arena() BMNOEXCEPT
free all arena memory
bool is_subblock_null(unsigned nsub) const BMNOEXCEPT
Returns true if second level block pointer is 0.
void move_from(blocks_manager &bm) BMNOEXCEPT
implementation of move semantics
void set_block_all_set_ptr(unsigned i, unsigned j)
Places new block into blocks table.
static void alloc_arena(arena *ar, const bm::bv_arena_statistics &st, allocator_type &alloc)
Allocate arena (content memory) based on arena statistics.
blocks_manager(blocks_manager &&blockman) BMNOEXCEPT
bm::gap_word_t * extend_gap_block(block_idx_type nb, gap_word_t *blk)
Extends GAP block to the next level or converts it to bit block.
void assign_gap_check(unsigned i, unsigned j, const bm::gap_word_t *res, unsigned res_len, bm::word_t *blk, gap_word_t *tmp_buf)
Attach the result of a GAP logical operation, but check whether it is all zeros.
bm::word_t ** check_alloc_top_subblock(unsigned nblk_blk)
Allocate top sub-block if not allocated.
block_idx_type find_next_nz_block(block_idx_type nb, bool deep_scan=true) const BMNOEXCEPT
Find the next non-zero block starting from nb.
bm::word_t * temp_block_
Temp block.
bm::word_t *** top_blocks_root() const BMNOEXCEPT
Returns root block in the tree.
void deinit_tree() BMNOEXCEPT
const bm::word_t * get_block(block_idx_type nb, int *no_more_blocks) const BMNOEXCEPT
Returns current capacity (bits)
unsigned top_block_size() const BMNOEXCEPT
Returns size of the top block array in the tree.
void destroy_tree() BMNOEXCEPT
destroy tree, free memory in all blocks and control structures. Note: pointers are NOT assigned to zero.
void free_temp_block() BMNOEXCEPT
bm::word_t * copy_bit_block(block_idx_type nb, const bm::word_t *block_src, int is_src_gap)
Create bit block as a copy of source block (bit or gap).
size_t calc_serialization_null_full() const BMNOEXCEPT
Calculate approximate memory needed to serialize long runs of zeros and ones (as blocks)
void free_top_subblock(unsigned nblk_blk) BMNOEXCEPT
bm::word_t * check_allocate_block(block_idx_type nb, int initial_block_type)
Function checks if block is not yet allocated, allocates and returns.
bm::word_t ** alloc_top_subblock(unsigned i, bm::word_t *addr)
Allocate sub-block and fill it based on the provided address.
const bm::word_t * get_block_ptr(unsigned i, unsigned j) const BMNOEXCEPT
Finds block in 2-level blocks array (uninitialized)
bm::word_t * set_gap_block(block_idx_type nb, const gap_word_t *gap_block_src, int level)
Allocate and place new GAP block (copy of the provided block)
unsigned glen(unsigned level) const BMNOEXCEPT
Returns GAP level length for specified level.
void assign_gap(block_idx_type nb, const bm::gap_word_t *res, unsigned res_len, bm::word_t *blk, gap_word_t *tmp_buf)
Attach the result of a GAP logical operation.
void clone_gap_block(unsigned i, unsigned j, const bm::gap_word_t *gap_block, unsigned len)
Clone static known block, assign to i-j position.
allocator_type alloc_
allocator
void init_tree()
allocate the first level of block descriptors
allocator_type & get_allocator() BMNOEXCEPT
Returns reference on the allocator.
bool is_init() const BMNOEXCEPT
true if the tree of blocks is already initialized
void validate_top_zero(unsigned i) BMNOEXCEPT
void swap(blocks_manager &bm) BMNOEXCEPT
Swaps content.
void opt_copy_bit_block(unsigned i, unsigned j, const bm::word_t *src_block, int opt_mode, bm::word_t *tmp_block)
Optimize and copy bit-block.
bm::word_t *** top_blocks_root() BMNOEXCEPT
void deallocate_top_subblock(unsigned nblk_blk) BMNOEXCEPT
void optimize_gap_convert_bit_block(unsigned i, unsigned j, bm::word_t *block, unsigned threashold)
const bm::word_t * get_block(unsigned i, unsigned j) const BMNOEXCEPT
Finds block in 2-level blocks array.
void copy_to_arena(const blocks_manager &bman)
calculate arena statistics, then copy all blocks into the arena
void set_block_all_set_no_check(unsigned i, unsigned j)
bm::word_t * copy_block(block_idx_type idx, const blocks_manager &bm_src)
Copy block from another vector.
void optimize_bit_block_nocheck(unsigned i, unsigned j)
Fully optimize bit-block at i-j position (no checks)
bm::word_t * convert_gap2bitset(unsigned i, unsigned j, const gap_word_t *gap_block=0, unsigned len=0)
Converts block from type gap to conventional bitset block.
bm::word_t * check_allocate_block(block_idx_type nb, unsigned content_flag, int initial_block_type, int *actual_block_type, bool allow_null_ret=true)
Function checks if block is not yet allocated, allocates it and sets to all-zero or all-one bits.
void free_ptr(bm::word_t **ptr) BMNOEXCEPT
BMFORCEINLINE void set_block_ptr(unsigned i, unsigned j, bm::word_t *block) BMNOEXCEPT
Places new block into blocks table.
void operator=(const blocks_manager &)
void copy(const blocks_manager &blockman, block_idx_type block_from=0, block_idx_type block_to=bm::set_total_blocks)
const gap_word_t * glen() const BMNOEXCEPT
Returns current GAP level vector.
unsigned compute_top_block_size(id_type bits_to_store) const BMNOEXCEPT
Compute size of the block array needed to store bits.
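A minimal sketch of the arithmetic compute_top_block_size() implies, assuming one top-level slot covers bm::set_sub_array_size (256) blocks of bm::gap_max_bits (65536) bits each; the real implementation may differ in rounding and edge handling.

#include <cstdio>

int main()
{
    const unsigned gap_max_bits       = 65536;  // bits per block (assumed value)
    const unsigned set_sub_array_size = 256;    // blocks per top slot (assumed value)

    unsigned long long bits_to_store = 1000000ULL;
    unsigned long long bits_per_top  = (unsigned long long)set_sub_array_size * gap_max_bits;

    // round up: number of top-level slots needed to address bits_to_store bits
    unsigned top_block_size = (unsigned)((bits_to_store + bits_per_top - 1) / bits_per_top);

    std::printf("top slots for %llu bits: %u\n", bits_to_store, top_block_size);
    return 0;
}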
unsigned find_real_top_blocks() const BMNOEXCEPT
calculate top blocks which are not NULL and not FULL
void zero_gap_block_ptr(unsigned i, unsigned j) BMNOEXCEPT
Free block, make it zero pointer in the tree.
blocks_manager(const blocks_manager &blockman)
void zero_block(block_idx_type nb) BMNOEXCEPT
Free block, make it zero pointer in the tree.
void set_block_all_set(unsigned i, unsigned j)
bm::word_t * deoptimize_block_no_check(bm::word_t *block, unsigned i, unsigned j)
bm::word_t * deoptimize_block(unsigned i, unsigned j, bool alloc)
Deoptimize block and return the bit-block pointer; can return NULL if the block does not exist, or allocate one (if requested).
bm::word_t * make_bit_block(block_idx_type nb)
Create all-zeros bit block.
bm::gap_word_t * allocate_gap_block(unsigned level, const gap_word_t *src=0, const gap_word_t *glevel_len=0)
bool is_sparse_sblock(unsigned i, unsigned sparse_cut_off, bm::bv_sub_survey &sub_stat) const BMNOEXCEPT
Bit count all blocks to determine if it is very sparse.
static void free_arena(arena *ar, allocator_type &alloc) BMNOEXCEPT
Free arena (content memory) based on arena statistics.
void optimize_bit_block(unsigned i, unsigned j, int opt_mode)
Optimize bit-block at i-j position.
bm::word_t * borrow_tempblock()
void optimize_block(unsigned i, unsigned j, bm::word_t *block, bm::word_t *temp_block, int opt_mode, bv_statistics *bv_stat)
void calc_arena_stat(bm::bv_arena_statistics *st) const BMNOEXCEPT
Calculates bitvector arena statistics.
bm::word_t * set_block(unsigned i, unsigned j, bm::word_t *block, bool gap)
Places new block into descriptors table, returns old block's address.
~blocks_manager() BMNOEXCEPT
bm::word_t ** alloc_top_subblock(unsigned nblk_blk)
unsigned reserve_top_blocks(unsigned top_blocks)
Make sure blocks manager has enough blocks capacity.
bm::word_t * set_block(block_idx_type nb, bm::word_t *block)
Places new block into descriptors table, returns old block's address.
bm::word_t * convert_gap2bitset(block_idx_type nb, const gap_word_t *gap_block=0)
Converts block from type gap to conventional bitset block.
void optimize_tree(bm::word_t *temp_block, int opt_mode, bv_statistics *bv_stat)
void set_block_ptr(block_idx_type nb, bm::word_t *block)
Places new block into blocks table.
bm::word_t * clone_gap_block(const bm::gap_word_t *gap_block, bool &gap_res)
Clone GAP block from another GAP block. It can mutate into a bit-block if it does not fit.
void set_all_zero(bool) BMNOEXCEPT
Fills all blocks with 0.
void copy_bit_block(unsigned i, unsigned j, const bm::word_t *src_block)
Allocate and copy block.
static bm::id_t block_bitcount(const bm::word_t *block) BMNOEXCEPT
Count number of bits ON in the block.
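A hedged usage sketch (not the library's block_bitcount() implementation): dispatch on the block type and count with the primitives listed on this page. It assumes the BitMagic headers are available as "bm.h" and ignores the FULL-block sentinel, which real code must branch on separately.

#include "bm.h"

// counts bits in a block, whatever its representation
inline bm::id_t demo_block_bitcount(const bm::word_t* block)
{
    if (!block)
        return 0;                                   // NULL block: no bits set
    if (BM_IS_GAP(block))                           // GAP (RLE) block
        return bm::gap_bit_count_unr(BMGAP_PTR(block));
    return bm::bit_block_count(block);              // plain bit block
}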
bm::word_t * clone_assign_block(unsigned i, unsigned j, const bm::word_t *src_block, bool invert=false)
Clone block, assign to i-j position.
void reserve(id_type max_bits)
reserve capacity for specified number of bits
void stat_correction(bv_statistics *stat) noexcept
void set_block_gap_ptr(block_idx_type nb, gap_word_t *gap_blk)
Mark pointer as GAP and assign to the blocks tree.
bm::word_t * set_block(block_idx_type nb, bm::word_t *block, bool gap)
Places new block into descriptors table, returns old block's address.
Bit-vector container with runtime compression of bits.
bool avx2_test_all_zero_wave(const void *ptr)
check if wave of pointers is all NULL
bool sse42_test_all_zero_wave(const void *ptr) noexcept
check if wave of pointers is all NULL
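A portable scalar sketch of what such a "wave" test does conceptually: verify that a small group of consecutive block pointers is all NULL. The SIMD versions above do this with a single vector compare; the wave width of 4 used here is an illustrative assumption.

#include <cassert>

static bool demo_test_all_zero_wave(const void* ptr)
{
    const void* const* p = static_cast<const void* const*>(ptr);
    return !(p[0] || p[1] || p[2] || p[3]);   // all four pointers NULL?
}

int main()
{
    const void* wave_empty[4] = {nullptr, nullptr, nullptr, nullptr};
    int x = 0;
    const void* wave_mixed[4] = {nullptr, &x, nullptr, nullptr};

    assert(demo_test_all_zero_wave(wave_empty));
    assert(!demo_test_all_zero_wave(wave_mixed));
    return 0;
}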
bm::id_t bit_block_count(const bm::word_t *block) noexcept
Bitcount for bit block.
void bit_invert(T *start) noexcept
void bit_block_stream(bm::word_t *dst, const bm::word_t *src) noexcept
Bitblock copy/stream operation.
bool is_bits_one(const bm::wordop_t *start) noexcept
Returns "true" if all bits in the block are 1.
bool bit_is_all_zero(const bm::word_t *start) noexcept
Returns "true" if all bits in the block are 0.
void bit_block_set(bm::word_t *dst, bm::word_t value) noexcept
Bitblock memset operation.
void bit_block_copy(bm::word_t *dst, const bm::word_t *src) noexcept
Bitblock copy operation.
unsigned bit_block_calc_change(const bm::word_t *block) noexcept
bool gap_is_all_one(const bm::gap_word_t *buf) noexcept
Checks if GAP block is all-one.
void gap_invert(T *buf) noexcept
Inverts all bits in the GAP buffer.
void set_gap_level(T *buf, int level) noexcept
Sets GAP block capacity level.
unsigned gap_test_unr(const T *buf, const unsigned pos) noexcept
Tests if bit = pos is true. Analog of bm::gap_test with SIMD unrolling.
bool gap_is_all_zero(const bm::gap_word_t *buf) noexcept
Checks if GAP block is all-zero.
void gap_convert_to_bitset(unsigned *dest, const T *buf, unsigned len=0) noexcept
GAP block to bitblock conversion.
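A usage sketch, assuming the BitMagic headers are available as "bm.h": build a trivial all-ones GAP block with bm::gap_set_all() and expand it into a plain bit block with bm::gap_convert_to_bitset().

#include "bm.h"
#include <cassert>

int main()
{
    bm::gap_word_t gap_buf[bm::gap_max_buff_len] = {0};
    bm::gap_set_all(gap_buf, bm::gap_max_bits, 1);       // GAP block: all bits = 1

    static bm::word_t bit_block[bm::set_block_size];     // destination bit block
    bm::gap_convert_to_bitset(bit_block, gap_buf);

    // every word of the expanded block should be all ones
    assert(bit_block[0] == ~0u && bit_block[bm::set_block_size - 1] == ~0u);
    return 0;
}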
unsigned gap_bit_count_unr(const T *buf) noexcept
Calculates number of bits ON in GAP buffer. Loop unrolled version.
unsigned gap_capacity(const T *buf, const T *glevel_len) noexcept
Returns GAP block capacity.
T gap_level(const T *buf) noexcept
Returns GAP block capacity level.
int gap_calc_level(unsigned len, const T *glevel_len) noexcept
Calculates GAP block capacity level.
bm::gap_word_t gap_length(const bm::gap_word_t *buf) noexcept
Returns GAP block length.
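The GAP header word layout can be read off source line 1914 above, (((len-1) << 3) | (level << 1) | (*src & 1)): bit 0 holds the value of the first run, bits 1..2 the capacity level, and the remaining bits the length minus one. The sketch below just encodes and decodes such a header; treat the field widths as an illustration derived from that expression.

#include <cstdio>

int main()
{
    typedef unsigned short gap_word_t;

    unsigned len = 3, level = 1, start_bit = 0;
    gap_word_t header = (gap_word_t)(((len - 1) << 3) | (level << 1) | (start_bit & 1));

    // decode the same fields back out of the header word
    unsigned first_bit = header & 1;           // cf. source line 224: block[0] & 1
    unsigned lvl       = (header >> 1) & 3;
    unsigned length    = (header >> 3) + 1;    // cf. source line 1757: gp[0] >> 3

    std::printf("first_bit=%u level=%u length=%u\n", first_bit, lvl, length);
    return 0;
}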
void gap_set_all(T *buf, unsigned set_max, unsigned value) noexcept
Sets all bits to 0 or 1 (GAP)
const unsigned set_array_mask
const unsigned gap_max_level
void xor_swap(W &x, W &y) noexcept
XOR swap two variables.
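A minimal illustration of the XOR-swap trick: exchanging two integers without a temporary. Note that xor-swapping a variable with itself would zero it, so the two references must be distinct.

#include <cassert>

template<typename W>
void demo_xor_swap(W& x, W& y) noexcept
{
    x ^= y;
    y ^= x;   // y now holds the original x
    x ^= y;   // x now holds the original y
}

int main()
{
    unsigned a = 5, b = 9;
    demo_xor_swap(a, b);
    assert(a == 9 && b == 5);
    return 0;
}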
bool find_not_null_ptr(const bm::word_t *const *const *arr, N start, N size, N *pos) noexcept
const unsigned set_sub_array_size
void get_block_coord(BI_TYPE nb, unsigned &i, unsigned &j) noexcept
Recalc linear bvector block index into 2D matrix coordinates.
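A sketch of the index split get_block_coord() performs, assuming a sub-array of 256 blocks per top slot (so bm::set_array_shift == 8 and bm::set_array_mask == 0xFF); the real constants come from the library configuration.

#include <cassert>

int main()
{
    const unsigned set_array_shift = 8;      // assumed: log2(blocks per sub-array)
    const unsigned set_array_mask  = 0xFF;   // assumed: blocks per sub-array - 1

    unsigned long long nb = 1000;                     // linear block number
    unsigned i = (unsigned)(nb >> set_array_shift);   // top-level (row) index
    unsigned j = (unsigned)(nb & set_array_mask);     // index inside the sub-array

    assert(i == 3 && j == 232);                       // 1000 == 3*256 + 232
    return 0;
}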
bool check_block_zero(const bm::word_t *blk, bool deep_scan) noexcept
Checks all conditions and returns true if block consists of only 0 bits.
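A conceptual sketch of the checks this description implies (not the library's exact code): a NULL pointer is trivially zero, a GAP block is tested with bm::gap_is_all_zero(), and a real bit block is scanned with bm::bit_is_all_zero() only when a deep scan is requested. Assumes "bm.h" is available; the FULL-block sentinel would need its own branch.

#include "bm.h"

inline bool demo_check_block_zero(const bm::word_t* blk, bool deep_scan)
{
    if (!blk)
        return true;                                  // unallocated block == all zero
    if (BM_IS_GAP(blk))
        return bm::gap_is_all_zero(BMGAP_PTR(blk));   // GAP block test
    return deep_scan ? bm::bit_is_all_zero(blk)       // full word-by-word scan
                     : false;                         // shallow: assume "not zero"
}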
const unsigned set_total_blocks
void bit_block_change_bc(const bm::word_t *block, unsigned *gc, unsigned *bc) noexcept
const unsigned gap_levels
const unsigned set_block_size
unsigned bit_to_gap(gap_word_t *dest, const unsigned *block, unsigned dest_len) noexcept
Convert bit block to GAP representation.
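A usage sketch for bm::bit_to_gap(), assuming "bm.h" is available: convert a mostly empty bit block into GAP form. Treating a return value of 0 as "did not fit into dest_len words" is an assumption about the contract, stated here for illustration.

#include "bm.h"
#include <cstdio>

int main()
{
    static bm::word_t bit_block[bm::set_block_size] = {0};
    bit_block[0] = 1u;                           // set a single bit (bit 0)

    bm::gap_word_t gap_buf[bm::gap_max_buff_len];
    unsigned len = bm::bit_to_gap(gap_buf, bit_block, bm::gap_max_buff_len);

    if (len)
        std::printf("GAP conversion used %u words\n", len);
    else
        std::printf("block too dense to fit the GAP buffer\n");
    return 0;
}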
unsigned long long int id64_t
const unsigned gap_equiv_len
const unsigned gap_max_buff_len
const unsigned set_array_shift
unsigned short gap_word_t
const unsigned gap_max_bits
const unsigned set_top_array_size
void for_each_block(T ***root, unsigned size1, F &f, BLOCK_IDX start)
bool is_aligned(T *p) noexcept
Check pointer alignment.
const unsigned bits_in_block
Arena allocation memory guard.
arena_guard(arena *ar, blocks_manager &bman) noexcept
Allocation arena for ReadOnly vectors.
void reset()
Set all arena fields to zero.
bm::bv_arena_statistics st_
statistics and sizes
bm::word_t ** blk_blks_
PTR sub-blocks area.
void * a_ptr_
main allocated pointer
bm::gap_word_t * gap_blocks_
GAP blocks area.
bm::word_t *** top_blocks_
top descriptor
bm::word_t * blocks_
bit-blocks area
Structure with statistical information about memory allocation for arena based vectors.
size_t bit_blocks_sz
Total size of bit blocks.
size_t gap_blocks_sz
Total size of gap blocks.
size_t ptr_sub_blocks_sz
Total size of sub-blocks ptrs.
unsigned top_block_size
size of top descriptor
void reset() noexcept
Reset statistics.
Structure with statistical information about memory allocation footprint, serialization projection,...
void add_gap_block(unsigned capacity, unsigned length, unsigned level) noexcept
count a GAP block
size_t ptr_sub_blocks
Number of sub-blocks.
void add_bit_block() noexcept
count a bit block
size_t max_serialize_mem
estimated maximum memory for serialization
Basic stats on second level group of blocks.
Default GAP lengths table.