fixed: do not re-hash files that already exist in a different directory pointed to by a symlink

csoler 2017-09-25 20:56:35 +02:00
parent 4766a8927a
commit d8cb3fe35a
2 changed files with 29 additions and 16 deletions
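
The fix below keys both the duplicate-directory check and the hash cache on the symlink-resolved ("real") path of each entry. RsDirUtil::removeSymLinks() itself is not part of this diff; a minimal sketch of that kind of canonicalization, assuming POSIX realpath(3) and a fall-back-to-input error policy (both assumptions, as is the function name):

    #include <climits>   // PATH_MAX (POSIX)
    #include <cstdlib>   // realpath (POSIX)
    #include <string>

    // Resolve every symbolic link in 'path' so that two paths reaching
    // the same file or directory compare equal. Hypothetical stand-in
    // for RsDirUtil::removeSymLinks(), which this diff only calls.
    static std::string remove_sym_links(const std::string& path)
    {
        char resolved[PATH_MAX];

        if (realpath(path.c_str(), resolved) != nullptr)
            return std::string(resolved);

        return path; // assumed fallback: keep the original path on error
    }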


@@ -147,6 +147,7 @@ bool LocalDirectoryUpdater::sweepSharedDirectories()
 #ifdef DEBUG_LOCAL_DIR_UPDATER
     std::cerr << "[directory storage] recursing into " << stored_dir_it.name() << std::endl;
 #endif
+    existing_dirs.insert(RsDirUtil::removeSymLinks(stored_dir_it.name()));
     recursUpdateSharedDir(stored_dir_it.name(), *stored_dir_it, existing_dirs, 1); // here we need to use the list that was stored, instead of
                                                                                    // the shared dir list, because the two are not necessarily
                                                                                    // in the same order.
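
This hunk seeds existing_dirs with the canonical path of every top-level shared directory before any recursion starts, so that a symlink met deeper in one share that points at another share's root is caught as a duplicate. A condensed sketch of that flow, reusing the hypothetical remove_sym_links() helper (sweep() and recurs_update() are stand-in names):

    #include <set>
    #include <string>
    #include <vector>

    std::string remove_sym_links(const std::string& path);     // hypothetical helper, sketched earlier
    void recurs_update(const std::string& dir,
                       std::set<std::string>& existing_dirs,
                       int depth);                              // stands in for recursUpdateSharedDir()

    // Canonicalize and register every shared root first, then recurse.
    void sweep(const std::vector<std::string>& shared_roots)
    {
        std::set<std::string> existing_dirs;

        for (const std::string& root : shared_roots)
            existing_dirs.insert(remove_sym_links(root)); // seed with canonical roots

        for (const std::string& root : shared_roots)
            recurs_update(root, existing_dirs, 1);        // depth starts at 1, as above
    }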
@@ -164,18 +165,6 @@ void LocalDirectoryUpdater::recursUpdateSharedDir(const std::string& cumulated_path
     std::cerr << "[directory storage] parsing directory " << cumulated_path << ", index=" << indx << std::endl;
 #endif
-    if(mFollowSymLinks && mIgnoreDuplicates)
-    {
-        std::string real_path = RsDirUtil::removeSymLinks(cumulated_path) ;
-
-        if(existing_directories.end() != existing_directories.find(real_path))
-        {
-            std::cerr << "(WW) Directory " << cumulated_path << " has real path " << real_path << " which already belongs to another shared directory. Ignoring" << std::endl;
-            return ;
-        }
-        existing_directories.insert(real_path) ;
-    }
-
     // make sure list of subdirs is the same
     // make sure list of subfiles is the same
     // request all hashes to the hashcache
@@ -210,13 +199,32 @@ void LocalDirectoryUpdater::recursUpdateSharedDir(const std::string& cumulated_path
         break;

     case librs::util::FolderIterator::TYPE_DIR:
     {
+        bool dir_is_accepted = true ;
+
-        if( (mMaxShareDepth > 0u && current_depth <= mMaxShareDepth) || (mMaxShareDepth==0 && current_depth < 64)) // 64 is here as a safe limit, to make loops impossible.
+        if( (mMaxShareDepth > 0u && current_depth > mMaxShareDepth) || (mMaxShareDepth==0 && current_depth >= 64)) // 64 is a hard safety limit that makes symlink loops impossible.
+            dir_is_accepted = false ;
+
+        if(dir_is_accepted && mFollowSymLinks && mIgnoreDuplicates)
+        {
+            std::string real_path = RsDirUtil::removeSymLinks(cumulated_path + "/" + dirIt.file_name()) ;
+
+            if(existing_directories.end() != existing_directories.find(real_path))
+            {
+                std::cerr << "(WW) Directory " << cumulated_path << " has real path " << real_path << " which already belongs to another shared directory. Ignoring" << std::endl;
+                dir_is_accepted = false ;
+            }
+            else
+                existing_directories.insert(real_path) ;
+        }
+
+        if(dir_is_accepted)
             subdirs.insert(dirIt.file_name());

 #ifdef DEBUG_LOCAL_DIR_UPDATER
         std::cerr << "  adding sub-dir \"" << dirIt.file_name() << "\"" << std::endl;
 #endif
     }
     break;

 default:
     std::cerr << "(EE) Dir entry of unknown type with path \"" << cumulated_path << "/" << dirIt.file_name() << "\"" << std::endl;


@@ -223,8 +223,10 @@ bool HashStorage::requestHash(const std::string& full_path,uint64_t size,time_t
 #endif
     RS_STACK_MUTEX(mHashMtx) ;

+    std::string real_path = RsDirUtil::removeSymLinks(full_path) ;
+
     time_t now = time(NULL) ;
-    std::map<std::string,HashStorageInfo>::iterator it = mFiles.find(full_path) ;
+    std::map<std::string,HashStorageInfo>::iterator it = mFiles.find(real_path) ;

     // On windows we compare the time up to +/- 3600 seconds. This avoids re-hashing files in case of daylight saving change.
     //
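
The comment above refers to a tolerant timestamp comparison: a daylight-saving transition can shift the modification time reported for a file by exactly one hour on Windows/FAT-style filesystems, which would otherwise look like a content change. A minimal sketch of such a comparison (helper name assumed; the diff only shows the comment describing the behavior):

    #include <cstdlib> // std::llabs
    #include <ctime>

    // Treat two modification times as equal when they differ by at most
    // one hour, so a DST shift alone never triggers a re-hash.
    static bool same_mod_time(time_t a, time_t b)
    {
        return std::llabs(static_cast<long long>(a) -
                          static_cast<long long>(b)) <= 3600;
    }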
@@ -261,7 +263,7 @@ bool HashStorage::requestHash(const std::string& full_path,uint64_t size,time_t
     // we need to schedule a re-hashing

-    if(mFilesToHash.find(full_path) != mFilesToHash.end())
+    if(mFilesToHash.find(real_path) != mFilesToHash.end())
         return false ;

     FileHashJob job ;
@@ -272,7 +274,10 @@ bool HashStorage::requestHash(const std::string& full_path,uint64_t size,time_t
     job.full_path = full_path ;
     job.ts = mod_time ;

-    mFilesToHash[full_path] = job;
+    // We index the pending jobs by their real path, so that a file reachable several
+    // times through directory symlinks is not re-hashed. The client will be notified
+    // with the full path instead of the real path.
+    mFilesToHash[real_path] = job;

     mTotalSizeToHash += size ;
     ++mTotalFilesToHash;
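
Because the job keeps the caller's full_path while the map is keyed by the canonical path, a file reachable through several symlinked paths is queued for hashing only once, yet the client is later notified with the exact path it asked about. A reduced sketch of that shape (function name and free-standing signature are assumptions, and the real FileHashJob carries more fields):

    #include <ctime>
    #include <map>
    #include <string>

    std::string remove_sym_links(const std::string& path); // hypothetical helper, sketched earlier

    struct FileHashJob
    {
        std::string full_path; // path as requested by the client
        time_t      ts;        // modification time at scheduling
    };

    // Key the pending-job map by canonical path (deduplication) while
    // remembering the path the client used (notification).
    bool schedule_hash(std::map<std::string, FileHashJob>& files_to_hash,
                       const std::string& full_path, time_t mod_time)
    {
        const std::string real_path = remove_sym_links(full_path);

        if (files_to_hash.find(real_path) != files_to_hash.end())
            return false; // already queued via another path

        FileHashJob job;
        job.full_path = full_path;
        job.ts        = mod_time;
        files_to_hash[real_path] = job;
        return true;
    }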