use file digest
parent 98b231a221
commit 3b4e4d7113
1 changed file with 2 additions and 3 deletions
@@ -47,9 +47,8 @@ class find_duplicate_files:
             if file.is_file():
                 hash = hashlib.sha256()
                 with open(file.path, 'r') as file_pointer:
-                    file_content = file_pointer.read()
-                    hash.update(file_content.encode('utf-8'))
-                    files[hash.hexdigest()] = file.path
+                    digest = hashlib.file_digest(f, "sha256")
+                    files[digest.hexdigest()] = file.path
             elif file.is_dir(follow_symlinks=False):
                 more_files = self.recursive_scandir(
                     file.path,
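For context, hashlib.file_digest() is available from Python 3.11 and reads from a file object opened in binary mode. A minimal sketch of that pattern (the helper name and path below are hypothetical, not part of this commit) could look like this:

import hashlib

def sha256_of(path):
    # file_digest() streams the file contents in chunks, so the whole
    # file never has to be read into memory at once.
    with open(path, 'rb') as file_pointer:  # binary mode is required
        digest = hashlib.file_digest(file_pointer, "sha256")
    return digest.hexdigest()

# hypothetical usage
print(sha256_of("some_file.bin"))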