Merge branch 'qinzongyue' into bookug

merge qinzongyue's code to fix bugs
bookug 2017-07-16 21:44:48 +08:00
commit 9bed137382
3 changed files with 25 additions and 6 deletions


@@ -693,11 +693,11 @@ Database::get_important_preID()
{
important_preID.clear();
unsigned max_degree = 0;
for(TYPE_PREDICATE_ID i = 0; i <= limitID_predicate; ++i)
for(TYPE_PREDICATE_ID i = 0; i < limitID_predicate; ++i)
if (pre2num[i] > max_degree)
max_degree = pre2num[i];
unsigned limit_degree = max_degree / 2;
for(TYPE_PREDICATE_ID i = 0; i <= limitID_predicate; ++i)
for(TYPE_PREDICATE_ID i = 0; i < limitID_predicate; ++i)
if (pre2num[i] > limit_degree)
important_preID.push_back(i);
}
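
The loop-bound change above matters because the fix implies valid predicate IDs run from 0 to limitID_predicate - 1, so the old `<=` bound read one element past the end of pre2num. A minimal standalone sketch of the same selection logic with the corrected bound (illustrative names, not the gStore source):

#include <vector>

// pre2num holds one degree (triple count) per predicate; valid IDs are 0 .. limit-1
std::vector<unsigned> select_important_preIDs(const std::vector<unsigned>& pre2num, unsigned limit)
{
    unsigned max_degree = 0;
    for (unsigned i = 0; i < limit; ++i)        // '<' stays inside the array
        if (pre2num[i] > max_degree)
            max_degree = pre2num[i];

    std::vector<unsigned> important;
    unsigned limit_degree = max_degree / 2;     // keep predicates above half the maximum degree
    for (unsigned i = 0; i < limit; ++i)
        if (pre2num[i] > limit_degree)
            important.push_back(i);
    return important;
}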
@@ -745,7 +745,7 @@ Database::get_candidate_preID()
std::priority_queue <KEY_SIZE_VALUE, deque<KEY_SIZE_VALUE>, greater<KEY_SIZE_VALUE> > rubbish;
while(!rubbish.empty()) rubbish.pop();
while(!candidate_preID.empty()) candidate_preID.pop();
for(TYPE_PREDICATE_ID i = 0; i <= limitID_predicate; ++i)
for(TYPE_PREDICATE_ID i = 0; i < limitID_predicate; ++i)
{
unsigned _value = 0;
unsigned _size;
@@ -755,6 +755,7 @@ Database::get_candidate_preID()
if (!VList::isLongList(_size)) continue; // only long lists need to be stored in the cache
_value = pre2num[i];
if (_value == 0) continue;
if (_size + now_total_size < max_total_size)
{
@@ -834,15 +835,16 @@ Database::get_important_subID()
{
while(!important_subID.empty()) important_subID.pop();
unsigned now_total_size = 0;
const string invalid = "";
const unsigned max_total_size = 2000000000; // 2 GB cache budget
std::priority_queue <KEY_SIZE_VALUE, deque<KEY_SIZE_VALUE>, greater<KEY_SIZE_VALUE> > rubbish;
while(!rubbish.empty()) rubbish.pop();
// a sub that has a large degree with an important pre is an important sub
for(TYPE_ENTITY_LITERAL_ID i = 0; i <= limitID_entity; ++i)
for(TYPE_ENTITY_LITERAL_ID i = 0; i < limitID_entity; ++i)
{
unsigned _value = 0;
unsigned _size;
if (this->kvstore->getEntityByID(i) == invalid) continue;
_size = this->kvstore->getSubListSize(i);
if (!VList::isLongList(_size)) continue; // only long lists need to be stored in the cache
@@ -893,13 +895,18 @@ Database::get_important_objID()
while(!important_objID.empty()) important_objID.pop();
unsigned now_total_size = 0;
const unsigned max_total_size = 2000000000; // 2 GB cache budget
const string invalid = "";
std::priority_queue <KEY_SIZE_VALUE, deque<KEY_SIZE_VALUE>, greater<KEY_SIZE_VALUE> > rubbish;
while(!rubbish.empty()) rubbish.pop();
// an obj that has a large degree with an important pre is an important obj
for(TYPE_ENTITY_LITERAL_ID i = 0; i <= limitID_literal; ++i)
for(TYPE_ENTITY_LITERAL_ID i = 0; i < limitID_literal; ++i)
{
unsigned _value = 0;
unsigned _size;
string _tmp;
if (i < limitID_entity) _tmp = this->kvstore->getEntityByID(i);
else _tmp = this->kvstore->getLiteralByID(i);
if (_tmp == invalid) continue;
_size = this->kvstore->getObjListSize(i);
if (!VList::isLongList(_size)) continue; // only long lists need to be stored in the cache
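
The added checks in get_important_subID and get_important_objID skip IDs that no longer map to a stored string, and for objects they pick the right lookup depending on whether the ID falls in the entity range or the literal range. A small self-contained sketch of that validity test (illustrative names only, not the gStore kvstore API):

#include <string>
#include <vector>

// entities holds strings for IDs 0 .. limitID_entity-1, literals holds the IDs after that;
// an empty string marks an unused slot, matching the `invalid` sentinel in the diff
bool is_valid_objID(const std::vector<std::string>& entities,
                    const std::vector<std::string>& literals,
                    unsigned id, unsigned limitID_entity)
{
    const std::string& s = (id < limitID_entity) ? entities[id]
                                                 : literals[id - limitID_entity];
    return !s.empty();   // invalid IDs are skipped before any cache-size bookkeeping
}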


@@ -622,6 +622,17 @@ IVTree::release(IVNode* _np) const
delete _np;
}
void
IVTree::AddIntoCache(TYPE_PREDICATE_ID _id)
{
char* _tmp = NULL;
unsigned _len;
this->search(_id, _tmp, _len); // look up the serialized value list for this predicate ID
// cout << "len is " << _len << endl;
this->value_list->AddIntoCache(_id, _tmp, _len); // hand the bytes to the value list's cache
delete [] _tmp; // release the temporary buffer from the lookup
}
void
IVTree::AddIntoCache(TYPE_ENTITY_LITERAL_ID _id)
{

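
The new predicate overload of AddIntoCache follows the same pattern as the existing entity/literal overload: look the key up in the tree, hand the resulting bytes to the value list's cache, then free the temporary buffer. A rough standalone sketch of that pattern, with toy stand-ins for the lookup and the cache (not the IVTree internals):

#include <cstring>
#include <map>
#include <string>

static std::map<unsigned, std::string> value_cache;   // toy cache keyed by ID

// toy lookup: allocates a buffer the caller must delete[], like a search that
// returns a serialized value list
static bool lookup(unsigned key, char*& val, unsigned& len)
{
    (void)key;                          // the toy ignores the key
    static const char demo[] = "serialized-value-list";
    len = sizeof(demo);
    val = new char[len];
    std::memcpy(val, demo, len);
    return true;
}

void add_into_cache(unsigned key)
{
    char* tmp = nullptr;
    unsigned len = 0;
    if (!lookup(key, tmp, len)) return;        // nothing stored for this key
    value_cache[key] = std::string(tmp, len);  // the cache keeps its own copy
    delete[] tmp;                              // so the temporary buffer can go
}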

@@ -88,6 +88,7 @@ public:
void resetStream();
bool range_query(unsigned _key1, unsigned _key2);
bool save();
void AddIntoCache(TYPE_PREDICATE_ID _id);
void AddIntoCache(TYPE_ENTITY_LITERAL_ID _id);
~IVTree();
void print(std::string s); //DEBUG(print the tree)