use file digest

Antonio J. Delgado 2023-01-25 14:46:45 +02:00
parent 98b231a221
commit 3b4e4d7113

@@ -47,9 +47,8 @@ class find_duplicate_files:
             if file.is_file():
                 hash = hashlib.sha256()
                 with open(file.path, 'r') as file_pointer:
-                    file_content = file_pointer.read()
-                    hash.update(file_content.encode('utf-8'))
-                files[hash.hexdigest()] = file.path
+                    digest = hashlib.file_digest(f, "sha256")
+                files[digest.hexdigest()] = file.path
             elif file.is_dir(follow_symlinks=False):
                 more_files = self.recursive_scandir(
                     file.path,
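
For context, a minimal standalone sketch of the pattern this commit moves to, assuming Python 3.11 or later (hashlib.file_digest() was added in 3.11). Two caveats about the hunk as rendered above: file_digest() requires a file object opened in binary mode, while the surviving context line still opens with 'r', and the added line passes f where the with statement binds file_pointer. The helper name sha256_of and the scan() wrapper below are illustrative, not from the repository:

import hashlib
import os


def sha256_of(path):
    # hashlib.file_digest() streams the file in chunks, so the whole
    # file is never held in memory; it needs a binary-mode file object.
    with open(path, 'rb') as file_pointer:
        return hashlib.file_digest(file_pointer, "sha256").hexdigest()


def scan(path):
    # Map each content digest to a path, mirroring the files dict in
    # the diff; two paths with the same digest are duplicate files.
    files = {}
    for entry in os.scandir(path):
        if entry.is_file():
            files[sha256_of(entry.path)] = entry.path
    return files

Compared with the removed read()/update() pair, this avoids decoding every file as text (which can raise UnicodeDecodeError on binary files) and avoids reading the whole file into memory at once.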