use a std::map to search file hashes instead of sweeping the entire database (ouch)

git-svn-id: http://svn.code.sf.net/p/retroshare/code/trunk@5963 b45a01b8-16f6-495d-af2f-9b41ad6348cc
This commit is contained in:
csoler 2012-12-10 22:25:57 +00:00
parent 3dd81f3e77
commit 2f7b7c4366
3 changed files with 44 additions and 4 deletions

View File

@ -845,6 +845,7 @@ void FileIndexMonitor::updateCycle()
if (fiMods)
locked_saveFileIndexes(true) ;
fi.updateHashIndex() ; // update hash map that is used to accelerate search.
fi.updateMaxModTime() ; // Update modification times for proper display.
mInCheck = false;
@ -978,10 +979,10 @@ void FileIndexMonitor::hashFiles(const std::vector<DirContentToHash>& to_hash)
/* don't hit the disk too hard! */
#ifndef WINDOWS_SYS
/********************************** WINDOWS/UNIX SPECIFIC PART ******************/
usleep(40000); /* 40 msec */
usleep(10000); /* 10 msec */
#else
Sleep(40);
Sleep(10);
#endif
// Save the hashing result every 60 seconds, so as to save what is already hashed.
@ -996,6 +997,7 @@ void FileIndexMonitor::hashFiles(const std::vector<DirContentToHash>& to_hash)
sleep(1) ;
#endif
RsStackMutex stack(fiMutex); /**** LOCKED DIRS ****/
fi.updateHashIndex() ;
FileIndexMonitor::locked_saveFileIndexes(true) ;
last_save_size = hashed_size ;
@ -1007,6 +1009,8 @@ void FileIndexMonitor::hashFiles(const std::vector<DirContentToHash>& to_hash)
running = isRunning();
}
fi.updateHashIndex() ;
cb->notifyListChange(NOTIFY_LIST_DIRLIST_LOCAL, 0);
}

View File

@ -515,6 +515,7 @@ FileIndex::FileIndex(const std::string& pid)
{
// Build and register the root node of this peer's directory tree.
root = new PersonEntry(pid);
registerEntry(root) ;
// The hash -> FileEntry lookup index starts empty; it is (re)built
// by updateHashIndex() once directory data is loaded.
_file_hashes.clear() ;
}
FileIndex::~FileIndex()
@ -552,9 +553,28 @@ int FileIndex::setRootDirectories(const std::list<std::string> &inlist, time_t u
(it->second)->updtime = updtime;
}
// update file hash index.
updateHashIndex() ;
return 1;
}
// Rebuild the hash -> FileEntry map that accelerates searchHash().
// Must be called after any modification of the file index (it is
// invoked from updateCycle(), hashFiles(), setRootDirectories() and
// loadIndex()).
void FileIndex::updateHashIndex()
{
_file_hashes.clear() ;	// drop stale pointers before re-scanning
recursUpdateHashIndex(root) ;	// walk the whole tree from the root
}
// Depth-first helper for updateHashIndex(): records every file of
// the given directory (and of all its subdirectories) into
// _file_hashes, keyed by the file's hash string.
//
// NOTE(review): std::map keeps a single entry per key, so if two
// files share the same hash the one visited last wins — confirm this
// is acceptable for searchHash() callers.
void FileIndex::recursUpdateHashIndex(DirEntry *dir)
{
	// Recurse into subdirectories first.
	std::map<std::string,DirEntry*>::iterator sub = dir->subdirs.begin() ;
	while(sub != dir->subdirs.end())
	{
		recursUpdateHashIndex(sub->second) ;
		++sub ;
	}

	// Then index the files of this directory.
	std::map<std::string,FileEntry*>::iterator fit = dir->files.begin() ;
	while(fit != dir->files.end())
	{
		_file_hashes[fit->second->hash] = fit->second ;
		++fit ;
	}
}
void FileIndex::updateMaxModTime()
{
RecursUpdateMaxModTime(root) ;
@ -632,6 +652,7 @@ FileEntry *FileIndex::updateFileEntry(const std::string& fpath, const FileEntry&
#endif
return NULL;
}
return parent -> updateFile(fe, utime);
}
@ -909,6 +930,8 @@ int FileIndex::loadIndex(const std::string& filename, const std::string& expecte
}
}
updateHashIndex() ;
return 1;
/* parse error encountered */
@ -1072,10 +1095,17 @@ int DirEntry::saveEntry(std::string &s)
int FileIndex::searchHash(const std::string& hash, std::list<FileEntry *> &results) const
{
#ifdef FI_DEBUG
//#ifdef FI_DEBUG
std::cerr << "FileIndex::searchHash(" << hash << ")";
std::cerr << std::endl;
#endif
//#endif
std::map<std::string,FileEntry*>::const_iterator it = _file_hashes.find(hash) ;
if(it!=_file_hashes.end() && isValid((void*)it->second))
results.push_back(it->second) ;
#ifdef OLD_CODE_PLZ_REMOVE
DirEntry *ndir = NULL;
std::list<DirEntry *> dirlist;
dirlist.push_back(root);
@ -1106,6 +1136,7 @@ int FileIndex::searchHash(const std::string& hash, std::list<FileEntry *> &resul
}
}
}
#endif
return 0;
}

View File

@ -257,6 +257,11 @@ class FileIndex
void *findRef(const std::string& path) const ;
bool extractData(const std::string& path,DirDetails& details) const ;
// Rebuilds _file_hashes by walking the whole directory tree.
void updateHashIndex() ;
// Recursive helper for updateHashIndex().
void recursUpdateHashIndex(DirEntry *) ;
// hash -> file entry map used to accelerate searchHash(). Stores
// non-owning pointers into the directory tree (clear() never deletes);
// refreshed wholesale on each updateHashIndex().
std::map<std::string,FileEntry*> _file_hashes ;
};