Mirror of https://github.com/RetroShare/RetroShare.git (synced 2025-02-24 17:00:27 -05:00)
Added display of hashing speed. Changed hashing buffer size to 10 MB to improve performance.

commit 3bb694f439 (parent 2e9f5202e0)
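
The performance rationale behind the buffer change is simple: each fread() is a separate library call (and, on cache misses, a disk round trip), so the read size directly bounds how many calls are needed to hash a file. A rough standalone illustration of that count (not part of the patch; the 1 GiB file size is an arbitrary example):

    #include <cstdint>
    #include <cstdio>

    int main()
    {
        const uint64_t file_size = 1024ull * 1024 * 1024 ; // example: a 1 GiB file
        const uint64_t old_chunk = 512 ;                   // previous read size
        const uint64_t new_chunk = 10 * 1024 * 1024 ;      // new 10 MB read size

        // The chunk size bounds the number of fread() calls needed per file.
        std::printf("512 B chunks : %llu fread() calls\n",
                    (unsigned long long)((file_size + old_chunk - 1) / old_chunk));
        std::printf("10 MB chunks : %llu fread() calls\n",
                    (unsigned long long)((file_size + new_chunk - 1) / new_chunk));
        return 0;
    }

Going from 512-byte to 10 MB reads cuts roughly two million calls per gigabyte down to about a hundred, which is what the getFileHash() hunks further down implement.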
@@ -24,6 +24,7 @@
 */
 #include "util/rsdir.h"
 #include "util/rsprint.h"
+#include "util/rsscopetimer.h"
 #include "rsserver/p3face.h"
 #include "pqi/authssl.h"
 #include "hash_cache.h"
@@ -43,8 +44,11 @@ HashStorage::HashStorage(const std::string& save_file_name)
     mLastSaveTime = 0 ;
     mTotalSizeToHash = 0;
     mTotalFilesToHash = 0;
+    mCurrentHashingSpeed = 0 ;
     mMaxStorageDurationDays = DEFAULT_HASH_STORAGE_DURATION_DAYS ;
     mHashingProcessPaused = false;
+    mHashedBytes = 0 ;
+    mHashingTime = 0 ;
 
     {
         RS_STACK_MUTEX(mHashMtx) ;
@@ -178,32 +182,48 @@ void HashStorage::data_tick()
 #endif
 
     std::string tmpout;
-    rs_sprintf(tmpout, "%lu/%lu (%s - %d%%) : %s", (unsigned long int)mHashCounter+1, (unsigned long int)mTotalFilesToHash, friendlyUnit(mTotalHashedSize).c_str(), int(mTotalHashedSize/double(mTotalSizeToHash)*100.0), job.full_path.c_str()) ;
+
+    if(mCurrentHashingSpeed > 0)
+        rs_sprintf(tmpout, "%lu/%lu (%s - %d%%, %d MB/s) : %s", (unsigned long int)mHashCounter+1, (unsigned long int)mTotalFilesToHash, friendlyUnit(mTotalHashedSize).c_str(), int(mTotalHashedSize/double(mTotalSizeToHash)*100.0), mCurrentHashingSpeed,job.full_path.c_str()) ;
+    else
+        rs_sprintf(tmpout, "%lu/%lu (%s - %d%%) : %s", (unsigned long int)mHashCounter+1, (unsigned long int)mTotalFilesToHash, friendlyUnit(mTotalHashedSize).c_str(), int(mTotalHashedSize/double(mTotalSizeToHash)*100.0), job.full_path.c_str()) ;
 
     RsServer::notify()->notifyHashingInfo(NOTIFY_HASHTYPE_HASH_FILE, tmpout) ;
 
-    if(RsDirUtil::getFileHash(job.full_path, hash,size, this))
-    {
-        // store the result
+    double seconds_origin = RsScopeTimer::currentTime() ;
+
+    if(RsDirUtil::getFileHash(job.full_path, hash,size, this))
+    {
+        // store the result
 
 #ifdef HASHSTORAGE_DEBUG
-        std::cerr << "done."<< std::endl;
+        std::cerr << "done."<< std::endl;
#endif
 
-        RS_STACK_MUTEX(mHashMtx) ;
-        HashStorageInfo& info(mFiles[job.real_path]);
+        RS_STACK_MUTEX(mHashMtx) ;
+        HashStorageInfo& info(mFiles[job.real_path]);
 
-        info.filename = job.real_path ;
-        info.size = size ;
-        info.modf_stamp = job.ts ;
-        info.time_stamp = time(NULL);
-        info.hash = hash;
+        info.filename = job.real_path ;
+        info.size = size ;
+        info.modf_stamp = job.ts ;
+        info.time_stamp = time(NULL);
+        info.hash = hash;
 
-        mChanged = true ;
-        mTotalHashedSize += size ;
-    }
-    else
-        std::cerr << "ERROR: cannot hash file " << job.full_path << std::endl;
+        mChanged = true ;
+        mTotalHashedSize += size ;
+    }
+    else
+        std::cerr << "ERROR: cannot hash file " << job.full_path << std::endl;
+
+    mHashingTime += RsScopeTimer::currentTime() - seconds_origin ;
+    mHashedBytes += size ;
+
+    if(mHashingTime > 3)
+    {
+        mCurrentHashingSpeed = (int)(mHashedBytes / mHashingTime ) / (1024*1024) ;
+        mHashingTime = 0 ;
+        mHashedBytes = 0 ;
+    }
 
     ++mHashCounter ;
 }
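
The MB/s figure shown in the progress notification comes from the accumulators added above: each hashed file contributes its size and the wall-clock time spent inside getFileHash(), and once more than three seconds of hashing time have built up, the speed is recomputed and the counters reset. A minimal standalone restatement of that bookkeeping (HashSpeedEstimator is an illustrative name, not RetroShare code):

    #include <cstdint>

    // Rolling speed estimate: accumulate bytes and seconds per hashed file,
    // refresh the MB/s figure once more than 3 seconds have been accounted.
    struct HashSpeedEstimator
    {
        double   hashing_time  = 0.0 ; // seconds spent hashing since last refresh
        uint64_t hashed_bytes  = 0 ;   // bytes hashed since last refresh
        uint32_t current_speed = 0 ;   // last computed speed in MB/s (0 = not known yet)

        void account(uint64_t bytes, double seconds)
        {
            hashing_time += seconds ;
            hashed_bytes += bytes ;

            if(hashing_time > 3.0) // same 3-second window as the patch
            {
                current_speed = (uint32_t)(hashed_bytes / hashing_time / (1024*1024)) ;
                hashing_time  = 0.0 ;
                hashed_bytes  = 0 ;
            }
        }
    };

data_tick() does the same bookkeeping inline, feeding the result into mCurrentHashingSpeed for the next notification string.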
@@ -140,5 +140,11 @@ private:
     uint64_t mTotalHashedSize ;
     uint64_t mTotalFilesToHash ;
     time_t mLastSaveTime ;
+
+    // The following is used to estimate hashing speed.
+
+    double mHashingTime ;
+    uint64_t mHashedBytes ;
+    uint32_t mCurrentHashingSpeed ; // in MB/s
 };
@@ -536,7 +536,10 @@ bool RsDirUtil::getFileHash(const std::string& filepath, RsFileHash &hash, uint6
     int len;
     SHA_CTX *sha_ctx = new SHA_CTX;
     unsigned char sha_buf[SHA_DIGEST_LENGTH];
-    unsigned char gblBuf[512];
+
+    static const uint32_t HASH_BUFFER_SIZE = 1024*1024*10 ;// allocate a 10MB buffer. Too small a buffer will cause multiple HD hits and slow down the hashing process.
+    RsTemporaryMemory gblBuf(HASH_BUFFER_SIZE) ;
+    //unsigned char gblBuf[512];
 
     /* determine size */
     fseeko64(fd, 0, SEEK_END);
@@ -548,7 +551,7 @@ bool RsDirUtil::getFileHash(const std::string& filepath, RsFileHash &hash, uint6
     int runningCheckCount = 0;
 
     SHA1_Init(sha_ctx);
-    while(isRunning && (len = fread(gblBuf,1, 512, fd)) > 0)
+    while(isRunning && (len = fread(gblBuf,1, HASH_BUFFER_SIZE, fd)) > 0)
     {
         SHA1_Update(sha_ctx, gblBuf, len);
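
The two hunks above swap the 512-byte stack buffer in RsDirUtil::getFileHash() for a 10 MB RsTemporaryMemory buffer, so SHA1_Update() is fed large chunks instead of tiny ones. As a self-contained sketch of the same chunked-hashing pattern in plain OpenSSL (hash_file_sha1 and its std::vector buffer are illustrative stand-ins, not RetroShare's API; the isRunning cancellation check of the real function is omitted):

    #include <cstddef>
    #include <cstdio>
    #include <vector>
    #include <openssl/sha.h>

    // Illustrative chunked SHA-1 of a file with a configurable read buffer;
    // mirrors the structure of getFileHash() after the patch.
    static bool hash_file_sha1(const char *path, unsigned char digest[SHA_DIGEST_LENGTH],
                               std::size_t buffer_size = 10 * 1024 * 1024)
    {
        FILE *fd = std::fopen(path, "rb");
        if(!fd)
            return false;

        std::vector<unsigned char> buf(buffer_size); // heap buffer, like RsTemporaryMemory

        SHA_CTX ctx;
        SHA1_Init(&ctx);

        std::size_t len;
        while((len = std::fread(buf.data(), 1, buf.size(), fd)) > 0)
            SHA1_Update(&ctx, buf.data(), len);

        SHA1_Final(digest, &ctx);
        std::fclose(fd);
        return true;
    }

With a 512-byte buffer the read loop runs millions of times for a large file; with the 10 MB buffer it runs a few hundred times, which is where the speed-up comes from.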
@@ -43,8 +43,7 @@ public:
     void start();
     double duration();
 
-private:
-    double currentTime();
+    static double currentTime();
 
 private:
     std::string _name ;
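
Making currentTime() a public static member is what allows HashStorage::data_tick() to take timestamps through RsScopeTimer::currentTime() without constructing a timer object. Its implementation is not part of this diff; a plausible stand-in based on std::chrono could look like this (illustrative only, not the RetroShare implementation):

    #include <chrono>

    // Illustrative sketch: a static currentTime() returning seconds, with
    // sub-second precision, since an arbitrary monotonic epoch.
    struct ScopeTimerSketch
    {
        static double currentTime()
        {
            using namespace std::chrono;
            return duration<double>(steady_clock::now().time_since_epoch()).count();
        }
    };

A caller times an interval by taking currentTime() before and after the work and subtracting the two values, which is exactly how data_tick() measures each getFileHash() call above.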