use file digest
parent 98b231a221
commit 3b4e4d7113
1 changed file with 2 additions and 3 deletions
@@ -47,9 +47,8 @@ class find_duplicate_files:
             if file.is_file():
-                hash = hashlib.sha256()
                 with open(file.path, 'r') as file_pointer:
                     file_content = file_pointer.read()
-                hash.update(file_content.encode('utf-8'))
-                files[hash.hexdigest()] = file.path
+                digest = hashlib.file_digest(f, "sha256")
+                files[digest.hexdigest()] = file.path
             elif file.is_dir(follow_symlinks=False):
                 more_files = self.recursive_scandir(
                     file.path,
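As the hunk reads, the new call passes a handle named f to hashlib.file_digest, while the unchanged with statement still binds the file to file_pointer and still opens it in text mode; file_digest (added in Python 3.11) requires a file object opened in binary mode. A minimal sketch of that branch with the handle name and mode aligned (the method shape and surrounding names are assumptions reconstructed from the visible context, not the repository's actual code):

    import hashlib
    import os

    class find_duplicate_files:
        # Hypothetical reconstruction around the visible hunk; the real
        # recursive_scandir appears to take more arguments (note the
        # trailing comma after file.path in the visible call).
        def recursive_scandir(self, path):
            files = {}
            for file in os.scandir(path):
                if file.is_file():
                    # file_digest() hashes the stream in chunks, so the
                    # whole file is never held in memory; it requires a
                    # file object opened in binary mode ('rb').
                    with open(file.path, 'rb') as file_pointer:
                        digest = hashlib.file_digest(file_pointer, "sha256")
                    files[digest.hexdigest()] = file.path
                elif file.is_dir(follow_symlinks=False):
                    more_files = self.recursive_scandir(file.path)
                    files.update(more_files)
            return files

Besides dropping the manual update() call, opening in binary mode avoids the old code's encode('utf-8') round-trip, which would raise UnicodeDecodeError on any file that is not valid UTF-8 text.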