     (tree->num_used == tree->num_alloc) {
  71     tree->num_alloc = 2 * tree->num_alloc;
  82     new_index = tree->num_used++;
  89     parent_node = tree->nodes + parent_index;
  90     new_node = tree->nodes + new_index;
 134     new_node = tree->nodes + new_index;
 139     new_node->leftend = region_start;
 166     tree->s_min = s_start;
 167     tree->s_max = s_end;
 219     if (frame == 0 || SIGN(frame) !=
 253     if (in_q_start != tree_q_start)
 271     Int4 in_q_length, tree_q_length, in_s_length, tree_s_length;
 283     if (in_q_length > tree_q_length)
 285     if (in_q_length < tree_q_length)
 290     if (in_s_length > tree_s_length)
 292     if (in_s_length < tree_s_length)
 341     ASSERT(target_offset <= root_node->rightend);
 350     tmp_index = root_node->midptr;
 351     list_node = root_node;
 352     next_node = tree->nodes + tmp_index;
 353     while (tmp_index != 0) {
 355         in_q_start, next_node->hsp,
 356         next_node->leftptr, which_end);
 358     tmp_index = next_node->midptr;
 359     if (best_hsp == next_node->hsp)
 361     else if (best_hsp == in_hsp)
 362     list_node->midptr = tmp_index;
 364     list_node = next_node;
 365     next_node = tree->nodes + tmp_index;
 373     if (target_offset < midpt)
 374     tmp_index = root_node->leftptr;
 375     else if (target_offset > midpt)
 385     next_node = tree->nodes + tmp_index;
 386     if (next_node->hsp != NULL) {
 393         in_q_start, next_node->hsp,
 394         next_node->leftptr, which_end);
 395     if (best_hsp == next_node->hsp) {
 398     else if (best_hsp == in_hsp) {
 400     if (target_offset < midpt)
 402     else if (target_offset > midpt)
 408     root_node = next_node;
 441     target_offset = in_q_start + in_hsp->query.offset;
 443     target_offset = in_q_start + in_hsp->query.end;
 450     ASSERT(target_offset <= root_node->rightend);
 455     tmp_index = root_node->midptr;
 456     if (tmp_index != 0) {
 458         in_q_start, which_end)) {
 468     if (target_offset < midpt)
 469     tmp_index = root_node->leftptr;
 470     else if (target_offset > midpt)
 480     next_node = tree->nodes + tmp_index;
 481     if (next_node->hsp != NULL) {
 488         in_q_start, next_node->hsp,
 489         next_node->leftptr, which_end);
 490     if (best_hsp == next_node->hsp) {
 493     else if (best_hsp == in_hsp) {
 495     if (target_offset < midpt)
 497     else if (target_offset > midpt)
 503     root_node = next_node;
 516     Int4 old_region_start;
 544     region_start = query_start - hsp->query.end;
 545     query_start = query_start -
 548     region_start = query_start + hsp->query.offset;
 549     region_end = query_start + hsp->query.end;
 552     nodes = tree->nodes;
 554     ASSERT(region_end <= nodes->rightend);
 589     index_subject_range = FALSE;
 596     nodes = tree->nodes;
 597     nodes[new_index].leftptr = query_start;
 598     nodes[new_index].midptr = 0;
 599     nodes[new_index].hsp = hsp;
 605     ASSERT(region_start >= nodes[root_index].leftend);
 606     ASSERT(region_end <= nodes[root_index].rightend);
 608     middle = ((Int8) nodes[root_index].leftend +
 611     if (region_end < middle) {
 616     if (nodes[root_index].leftptr == 0) {
 617     nodes[root_index].leftptr = new_index;
 626     old_index = nodes[root_index].leftptr;
 627     if (nodes[old_index].hsp == NULL) {
 628     root_index = old_index;
 635     else if (region_start > middle) {
 640     if (nodes[root_index].rightptr == 0) {
 641     nodes[root_index].rightptr = new_index;
 650     old_index = nodes[root_index].rightptr;
 651     if (nodes[old_index].hsp == NULL) {
 652     root_index = old_index;
 665     if (index_subject_range || index_method == eQueryOnly ||
 674     nodes[root_index].midptr = new_index;
 682     index_subject_range = TRUE;
 684     if (nodes[root_index].midptr == 0) {
 686         tree->s_max, &retval);
 689     nodes = tree->nodes;
 690     nodes[root_index].midptr = mid_index;
 692     root_index = nodes[root_index].midptr;
 712     nodes = tree->nodes;
 713     old_hsp = nodes[old_index].hsp;
 718     nodes[root_index].leftptr = mid_index;
 720     nodes[root_index].rightptr = mid_index;
 726     if (index_subject_range) {
 738     old_region_end = q_start - old_hsp->query.offset;
 739     old_region_start = q_start - old_hsp->query.end;
 746     root_index = mid_index;
 747     middle = ((Int8) nodes[root_index].leftend +
 749     if (old_region_end < middle) {
 752     nodes[mid_index].leftptr = old_index;
 754     else if (old_region_start > middle) {
 757     nodes[mid_index].rightptr = old_index;
 769     if (index_subject_range || index_method == eQueryOnly ||
 771     nodes[mid_index].midptr = old_index;
 775         tree->s_max, &retval);
 780     nodes = tree->nodes;
 781     nodes[mid_index].midptr = mid_index2;
 782     middle = ((Int8) nodes[mid_index2].leftend +
 785     if (old_region_end < middle)
 786     nodes[mid_index2].leftptr = old_index;
 787     else if (old_region_start > middle)
 788     nodes[mid_index2].rightptr = old_index;
 790     nodes[mid_index2].midptr = old_index;
 814     Int4 min_diag_separation)
 819     if (in_q_start != tree_q_start)
 833     if (min_diag_separation == 0)
 838         min_diag_separation) ||
 841         min_diag_separation)) {
 869     Int4 min_diag_separation)
 882     ASSERT(region_end <= node->rightend);
 889     tmp_index = node->midptr;
 890     while (tmp_index != 0) {
 895         min_diag_separation)) {
 898     tmp_index = tmp_node->midptr;
 907     if (region_end < middle)
 909     else if (region_start > middle)
 919     node = tree->nodes + tmp_index;
 926         min_diag_separation);
 934     Int4 min_diag_separation)
 944     ASSERT(region_end <= node->rightend);
 955     ASSERT(region_end <= node->rightend);
 960     tmp_index = node->midptr;
 964         min_diag_separation)) {
 975     if (region_end < middle)
 977     else if (region_start > middle)
 987     node = tree->nodes + tmp_index;
 994         min_diag_separation);
1019     if (in_q_start != tree_q_start ||
1063     Int4 tree_hsp_offset;
1073     if (in_q_start != tree_q_start ||
1074         in_score > tree_hsp->score)
1083     tree_hsp_end = tree_q_start - tree_hsp->query.offset;
1084     tree_hsp_offset = tree_q_start - tree_hsp->query.end;
1086     tree_hsp_offset = tree_q_start + tree_hsp->query.offset;
1087     tree_hsp_end = tree_q_start + tree_hsp->query.end;
1090     overlapStart = tree_hsp_offset;
1091     if (overlapStart < in_offset)
1092     overlapStart = in_offset;
1093     overlapEnd = tree_hsp_end;
1094     if (overlapEnd > in_end)
1095     overlapEnd = in_end;
1096     percOverlap = (Int4)(100 * ((double)(overlapEnd - overlapStart) /
1097         (in_end - in_offset)));
1099     if (percOverlap >= masklevel)
1115     Int4 in_query_start;
1125     region_end = in_query_start - hsp->query.offset;
1126     region_start = in_query_start - hsp->query.end;
1129     region_start = in_query_start + hsp->query.offset;
1130     region_end = in_query_start + hsp->query.end;
1143     tmp_index = node->midptr;
1144     while (tmp_index != 0) {
1147         hsp->score, in_query_start,
1152     tmp_index = tmp_node->midptr;
1161     if (region_end < middle)
1163     else if (region_start > middle)
1182     node = tree->nodes + tmp_index;
1187         hsp->score, in_query_start,
1206     Int4 num_redundant = 0;
1209     ASSERT(region_end <= node->rightend);
1218     ASSERT(region_end <= node->rightend);
1224     tmp_index = node->midptr;
1225     while (tmp_index != 0) {
1230     tmp_index = tmp_node->midptr;
1239     if (region_end < middle)
1241     else if (region_start > middle)
1249     return num_redundant;
1251     node = tree->nodes + tmp_index;
1256     return num_redundant +
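The fragments above keep repeating one pattern: a node covers the range [leftend, rightend], a region that fits entirely to the left or right of the node's midpoint descends into that child, and a region that straddles the midpoint is kept on the node's midptr list. The following is a minimal sketch of that descent, not the library's code; it uses pointers and hypothetical names for brevity, whereas the real tree stores Int4 offsets into a node array (the nodes[...] indexing in the listing).

    /* Sketch only: midpoint descent over a simplified node type. */
    struct SketchNode {
        int leftend, rightend;        /* region covered by this node        */
        struct SketchNode *leftptr;   /* subtree for the left half          */
        struct SketchNode *rightptr;  /* subtree for the right half         */
        struct SketchNode *midptr;    /* regions that straddle the midpoint */
    };

    static struct SketchNode *
    s_SketchDescend(struct SketchNode *node, int region_start, int region_end)
    {
        while (node != NULL) {
            long middle = ((long)node->leftend + node->rightend) / 2;

            if (region_end < middle)          /* entirely in the left half  */
                node = node->leftptr;
            else if (region_start > middle)   /* entirely in the right half */
                node = node->rightptr;
            else                              /* straddles the midpoint     */
                return node;   /* caller walks node->midptr from here       */
        }
        return NULL;
    }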
#define sfree(x)
Safely free a pointer; belongs to a higher-level header.
Private interface for blast_gapalign.c.
#define MB_HSP_CLOSE(q1, s1, q2, s2, c)
Are the two HSPs within a given number of diagonals from each other?
Utilities for dealing with BLAST HSPs in the core of BLAST.
#define CONTAINED_IN_HSP(a, b, c, d, e, f)
TRUE if c is between a and b; f between d and e.
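The exact body of CONTAINED_IN_HSP is not shown on this page; a plausible sketch of a check with the documented semantics (c between a and b, and f between d and e) could be:

    /* Sketch only, not the verbatim macro: TRUE when c lies in [a, b]
     * and f lies in [d, e]. */
    #define SKETCH_CONTAINED(a, b, c, d, e, f) \
        (((a) <= (c) && (c) <= (b)) && ((d) <= (f) && (f) <= (e)))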
static Int4 s_IntervalNodeInit(BlastIntervalTree *tree, Int4 parent_index, enum EIntervalDirection dir, Int2 *ret_status)
Allocate a new node for an interval tree.
Int4 BlastIntervalTreeNumRedundant(const BlastIntervalTree *tree, const BlastHSP *hsp, const BlastQueryInfo *query_info)
Determine the number of HSPs within an interval tree whose query range envelops the input HSP.
Boolean BlastIntervalTreeContainsHSP(const BlastIntervalTree *tree, const BlastHSP *hsp, const BlastQueryInfo *query_info, Int4 min_diag_separation)
Determine whether an interval tree contains an HSP that envelops an input HSP.
static Int4 s_HSPQueryRangeIsContained(const BlastHSP *in_hsp, Int4 in_q_start, const BlastHSP *tree_hsp, Int4 tree_q_start)
Determine whether the query range of an HSP is contained within the query range of another HSP.
Int4 BlastIntervalTreeMasksHSP(const BlastIntervalTree *tree, const BlastHSP *hsp, const BlastQueryInfo *query_info, Int4 subtree_index, Int4 masklevel)
static Int4 s_GetQueryStrandOffset(const BlastQueryInfo *query_info, Int4 context)
Retrieves the start offset (within a set of concatenated query sequences) of the strand containing a...
static Boolean s_MidpointTreeContainsHSP(const BlastIntervalTree *tree, Int4 root_index, const BlastHSP *in_hsp, Int4 in_q_start, Int4 min_diag_separation)
Determine whether a subtree of an interval tree contains an HSP that envelops the input HSP.
void Blast_IntervalTreeReset(BlastIntervalTree *tree)
Empty an interval tree structure but do not free it.
static Boolean s_HSPIsContained(const BlastHSP *in_hsp, Int4 in_q_start, const BlastHSP *tree_hsp, Int4 tree_q_start, Int4 min_diag_separation)
Determine whether an HSP is contained within another HSP.
static Boolean s_IntervalTreeHasHSPEndpoint(BlastIntervalTree *tree, const BlastHSP *in_hsp, Int4 in_q_start, enum EIntervalDirection which_end)
Determine whether an interval tree contains one or more HSPs that share a common endpoint with the in...
BlastIntervalTree * Blast_IntervalTreeInit(Int4 q_start, Int4 q_end, Int4 s_start, Int4 s_end)
Initialize an interval tree structure.
static Int4 s_IntervalRootNodeInit(BlastIntervalTree *tree, Int4 region_start, Int4 region_end, Int2 *retval)
Allocate a new root node for an interval tree.
Int2 BlastIntervalTreeAddHSP(BlastHSP *hsp, BlastIntervalTree *tree, const BlastQueryInfo *query_info, EITreeIndexMethod index_method)
Add an HSP to an existing interval tree.
static const BlastHSP * s_HSPsHaveCommonEndpoint(const BlastHSP *in_hsp, Int4 in_q_start, const BlastHSP *tree_hsp, Int4 tree_q_start, enum EIntervalDirection which_end)
Determine whether an input HSP shares a common start- or endpoint with an HSP from an interval tree.
BlastIntervalTree * Blast_IntervalTreeFree(BlastIntervalTree *tree)
Deallocate an interval tree structure.
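Taken together, the public functions listed above form a small lifecycle: initialize a tree over the query and subject ranges, test each candidate HSP for containment, add the ones worth keeping, and free the tree. The following usage sketch is built only from the declarations on this page; the header name, the 0-based ranges, the min_diag_separation value of 0, and the surrounding helper are assumptions, not code from the library.

    #include "blast_itree.h"              /* assumed header for these decls */

    static Int2
    s_SketchKeepNonRedundantHSPs(BlastHSP **hsps, Int4 num_hsps,
                                 const BlastQueryInfo *query_info,
                                 Int4 query_length, Int4 subject_length)
    {
        Int4 i;
        Int2 status = 0;
        BlastIntervalTree *tree =
            Blast_IntervalTreeInit(0, query_length, 0, subject_length);

        for (i = 0; i < num_hsps && status == 0; i++) {
            /* skip HSPs already enveloped by something in the tree */
            if (BlastIntervalTreeContainsHSP(tree, hsps[i], query_info, 0))
                continue;
            status = BlastIntervalTreeAddHSP(hsps[i], tree, query_info,
                                             eQueryAndSubject);
        }
        tree = Blast_IntervalTreeFree(tree);
        return status;
    }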
static Boolean s_MidpointTreeHasHSPEndpoint(BlastIntervalTree *tree, Int4 root_index, const BlastHSP *in_hsp, Int4 in_q_start, enum EIntervalDirection which_end)
Determine whether a subtree of an interval tree contains an HSP that shares a common endpoint with th...
EIntervalDirection
When allocating a node for an interval tree, this specifies which half of the parent node will be des...
@ eIntervalTreeRight
Node will handle right half of parent node.
@ eIntervalTreeNeither
No parent node is assumed.
@ eIntervalTreeLeft
Node will handle left half of parent node.
static Int4 s_HSPQueryRangeIsMasklevelContained(Int4 in_offset, Int4 in_end, Int4 in_score, Int4 in_q_start, const BlastHSP *tree_hsp, Int4 tree_q_start, const BlastQueryInfo *query_info, Int4 masklevel)
Determine whether the query range of an HSP overlaps the query range of another HSP by a perce...
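The overlap arithmetic for this test is visible at lines 1090-1097 of the listing above: clamp the tree HSP's query range to the input HSP's range, then express the overlap as a percentage of the input range. A sketch of just that step, with the listing's variable names but not the full function, is:

    /* Sketch of the masklevel overlap test seen in the listing above. */
    static Boolean
    s_SketchMasklevelOverlap(Int4 in_offset, Int4 in_end,
                             Int4 tree_hsp_offset, Int4 tree_hsp_end,
                             Int4 masklevel)
    {
        Int4 overlapStart = tree_hsp_offset;
        Int4 overlapEnd   = tree_hsp_end;
        Int4 percOverlap;

        if (overlapStart < in_offset)
            overlapStart = in_offset;
        if (overlapEnd > in_end)
            overlapEnd = in_end;

        /* overlap as a percentage of the input HSP's query range */
        percOverlap = (Int4)(100 * ((double)(overlapEnd - overlapStart) /
                                    (in_end - in_offset)));

        return (percOverlap >= masklevel) ? TRUE : FALSE;
    }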
Interface for an interval tree, used for fast HSP containment tests.
EITreeIndexMethod
How HSPs added to an interval tree are indexed.
@ eQueryOnlyStrandIndifferent
Index by query offset only, treating both query strands the same.
@ eQueryOnly
Index by query offset only.
@ eQueryAndSubject
Index by query and then by subject offset.
#define BLASTERR_MEMORY
System error: out of memory condition.
int16_t Int2
2-byte (16-bit) signed integer
int32_t Int4
4-byte (32-bit) signed integer
int64_t Int8
8-byte (64-bit) signed integer
#define SIGN(a)
return +1 for a > 0, -1 for a < 0
Uint1 Boolean
bool replacement for C
#define TRUE
bool replacement for C indicating true.
#define FALSE
bool replacement for C indicating false.
#define ASSERT
macro for assert.
Int4 query_length
Length of this query, strand or frame.
Int4 query_offset
Offset of this query, strand or frame in the concatenated super-query.
Int1 frame
Frame number (-1, -2, -3, 0, 1, 2, or 3)
Structure holding all information about an HSP.
BlastSeg query
Query sequence info.
Int4 context
Context number of query.
BlastSeg subject
Subject sequence info.
Int4 score
This HSP's raw score.
Main structure describing an interval tree.
The query related information.
BlastContextInfo * contexts
Information per context.
Int2 frame
Translation frame.
Int4 offset
Start of hsp.
Structure describing a node of an interval tree.
Int4 leftend
The left endpoint of the region this node describes.
Int4 rightptr
Offset to the subtree describing the right half of the region.
Int4 rightend
The right endpoint of the region this node describes.
Int4 midptr
Used for linked list of segments that cross the center of the region.
BlastHSP * hsp
The HSP contained in this region (only non-NULL for leaf nodes)
Int4 leftptr
Offset to the subtree describing the left half of the region, OR the query start offset (leaf nodes o...
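Collecting the member documentation above, the node layout can be pictured as follows. This is a sketch assembled from the documented fields; the struct name and field order are assumptions, and leftptr, rightptr, and midptr are Int4 offsets into the tree's node array (the nodes[...] indexing in the listing), not pointers.

    typedef struct SIntervalNodeSketch {
        Int4 leftend;    /* left endpoint of the region this node describes  */
        Int4 rightend;   /* right endpoint of the region this node describes */
        Int4 leftptr;    /* left-half subtree, or the query start offset
                            for leaf nodes                                    */
        Int4 rightptr;   /* right-half subtree                               */
        Int4 midptr;     /* list of segments crossing the region's center    */
        BlastHSP *hsp;   /* the HSP held here; non-NULL only for leaf nodes  */
    } SIntervalNodeSketch;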