From cc54e19b2052d1f003637132d834bb4a2f2562d5 Mon Sep 17 00:00:00 2001
From: Adones Pitogo
Date: Tue, 11 Jul 2023 13:31:50 +0800
Subject: build: fix generation of large .vdi images

Instead of loading the whole image into memory when generating the
sha256 sum, read the file in chunks and update the hash incrementally
to avoid a MemoryError in Python.

Also remove a stray empty line.

Fixes: #13056
Signed-off-by: Adones Pitogo
(mention empty line removal, adds Fixes from PR)
Signed-off-by: Christian Lamparter
(cherry picked from commit bdb4b78210cfb6bc8a6cda62fc990dd45ec3054c)
---
 scripts/json_add_image_info.py | 10 ++++++++--
 1 file changed, 8 insertions(+), 2 deletions(-)

diff --git a/scripts/json_add_image_info.py b/scripts/json_add_image_info.py
index 9aa2a19e45..056f0ccb5c 100755
--- a/scripts/json_add_image_info.py
+++ b/scripts/json_add_image_info.py
@@ -13,7 +13,6 @@ if len(argv) != 2:
 
 json_path = Path(argv[1])
 file_path = Path(getenv("FILE_DIR")) / getenv("FILE_NAME")
-
 if not file_path.is_file():
     print("Skip JSON creation for non existing file", file_path)
     exit(0)
@@ -37,7 +36,14 @@ def get_titles():
 
 device_id = getenv("DEVICE_ID")
 
-hash_file = hashlib.sha256(file_path.read_bytes()).hexdigest()
+
+sha256_hash = hashlib.sha256()
+with open(str(file_path),"rb") as f:
+    # Read and update hash string value in blocks of 4K
+    for byte_block in iter(lambda: f.read(4096),b""):
+        sha256_hash.update(byte_block)
+
+hash_file = sha256_hash.hexdigest()
 
 if file_path.with_suffix(file_path.suffix + ".sha256sum").exists():
     hash_unsigned = (
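
For reference, the chunked-hashing pattern this patch introduces can be
exercised as a standalone script. The sketch below is not part of the
patch; the helper name sha256_of_file and the 4096-byte default block
size are illustrative only:

#!/usr/bin/env python3
import hashlib
from pathlib import Path
from sys import argv

# Illustrative helper (not in the patch): hash a file of any size
# without holding more than one block in memory at a time.
def sha256_of_file(path, block_size=4096):
    digest = hashlib.sha256()
    with open(path, "rb") as f:
        # iter(callable, sentinel) calls f.read(block_size) repeatedly
        # until it returns the sentinel b"" at end of file.
        for block in iter(lambda: f.read(block_size), b""):
            digest.update(block)
    return digest.hexdigest()

if __name__ == "__main__":
    print(sha256_of_file(Path(argv[1])))

Two side notes on the patched code: open() accepts os.PathLike objects
on Python 3.6+, so the str(file_path) conversion is not required, and
Python 3.11+ provides hashlib.file_digest(f, "sha256"), which performs
the same buffered reading internally.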