Fix generate_checksum routine to avoid MemoryError crash with very large files on a 32-bit OS (#649, Issue #10690)
commit 2aec83f057
parent fd399323a6
@@ -251,9 +251,15 @@ def create_checksum(full_path):
     Create a md5 hash for the given file.
     """
     full_path = os.path.normpath(full_path)
+    md5 = hashlib.md5()
     try:
         with open(full_path, 'rb') as media_file:
-            md5sum = hashlib.md5(media_file.read()).hexdigest()
+            while True:
+                buf = media_file.read(65536)
+                if not buf:
+                    break
+                md5.update(buf)
+        md5sum = md5.hexdigest()
     except IOError:
         md5sum = ''
     except UnicodeEncodeError:
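For reference, below is a minimal, self-contained sketch of the chunked-hashing approach this diff applies. The function name, the 64 KiB read size, and the exception handling follow the hunk; the body of the UnicodeEncodeError handler and the final return are not visible in the hunk, so those parts are assumptions and are marked as such in the comments.

import hashlib
import os


def create_checksum(full_path):
    """
    Create an md5 hash for the given file.

    The file is read in 64 KiB chunks and fed to the hash incrementally,
    so a very large file never has to fit into memory at once (the cause
    of the MemoryError on 32-bit systems).
    """
    full_path = os.path.normpath(full_path)
    md5 = hashlib.md5()
    try:
        with open(full_path, 'rb') as media_file:
            while True:
                buf = media_file.read(65536)
                if not buf:
                    break
                md5.update(buf)
        md5sum = md5.hexdigest()
    except IOError:
        md5sum = ''
    except UnicodeEncodeError:
        # Assumption: the hunk ends before this handler's body; treat it
        # like the IOError case and fall back to an empty checksum.
        md5sum = ''
    return md5sum  # assumed return; the hunk ends before the function does

Calling hashlib.md5().update() repeatedly over consecutive chunks yields the same digest as hashing the whole file in one call, so callers see no behavioural change apart from the bounded memory use; 65536 bytes is simply the buffer size chosen in the diff.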