From bca9ad938a2a43621cf406d993b755cc91728dd5 Mon Sep 17 00:00:00 2001
From: Georgi Gerganov
Date: Wed, 3 May 2023 20:09:42 +0300
Subject: minor : fix whitespaces (#1302)

---
 README.md                         |   2 +-
 scripts/verify-checksum-models.py | 155 +++++++++++++++++++-------------------
 2 files changed, 78 insertions(+), 79 deletions(-)

diff --git a/README.md b/README.md
index de0a3de..0002f8c 100644
--- a/README.md
+++ b/README.md
@@ -388,7 +388,7 @@ python3 .\scripts\verify-checksum-models.py
 ```
 
 - On linux or macOS it is also possible to run the following commands to verify if you have all possible latest files in your self-installed `./models` subdirectory:
-    - On Linux: `sha256sum --ignore-missing -c SHA256SUMS` 
+    - On Linux: `sha256sum --ignore-missing -c SHA256SUMS`
     - on macOS: `shasum -a 256 --ignore-missing -c SHA256SUMS`
 
 ### Seminal papers and background on the models
diff --git a/scripts/verify-checksum-models.py b/scripts/verify-checksum-models.py
index 1f1b3d2..2ce5728 100644
--- a/scripts/verify-checksum-models.py
+++ b/scripts/verify-checksum-models.py
@@ -1,78 +1,77 @@
-import os
-import hashlib
-
-def sha256sum(file):
-    block_size = 16 * 1024 * 1024 # 16 MB block size
-    b = bytearray(block_size)
-    file_hash = hashlib.sha256()
-    mv = memoryview(b)
-    with open(file, 'rb', buffering=0) as f:
-        while True:
-            n = f.readinto(mv)
-            if not n:
-                break
-            file_hash.update(mv[:n])
-
-    return file_hash.hexdigest()
-
-# Define the path to the llama directory (parent folder of script directory)
-llama_path = os.path.abspath(os.path.join(os.path.dirname(__file__), os.pardir))
-
-# Define the file with the list of hashes and filenames
-hash_list_file = os.path.join(llama_path, "SHA256SUMS")
-
-# Check if the hash list file exists
-if not os.path.exists(hash_list_file):
-    print(f"Hash list file not found: {hash_list_file}")
-    exit(1)
-
-# Read the hash file content and split it into an array of lines
-with open(hash_list_file, "r") as f:
-    hash_list = f.read().splitlines()
-
-# Create an array to store the results
-results = []
-
-# Loop over each line in the hash list
-for line in hash_list:
-    # Split the line into hash and filename
-    hash_value, filename = line.split("  ")
-
-    # Get the full path of the file by joining the llama path and the filename
-    file_path = os.path.join(llama_path, filename)
-
-    # Informing user of the progress of the integrity check
-    print(f"Verifying the checksum of {file_path}")
-
-    # Check if the file exists
-    if os.path.exists(file_path):
-        # Calculate the SHA256 checksum of the file using hashlib
-        file_hash = sha256sum(file_path)
-
-        # Compare the file hash with the expected hash
-        if file_hash == hash_value:
-            valid_checksum = "V"
-            file_missing = ""
-        else:
-            valid_checksum = ""
-            file_missing = ""
-    else:
-        valid_checksum = ""
-        file_missing = "X"
-
-    # Add the results to the array
-    results.append({
-        "filename": filename,
-        "valid checksum": valid_checksum,
-        "file missing": file_missing
-    })
-
-
-# Print column headers for results table
-print("\n" + "filename".ljust(40) + "valid checksum".center(20) + "file missing".center(20))
-print("-" * 80)
-
-# Output the results as a table
-for r in results:
-    print(f"{r['filename']:40} {r['valid checksum']:^20} {r['file missing']:^20}")
-
+import os
+import hashlib
+
+def sha256sum(file):
+    block_size = 16 * 1024 * 1024 # 16 MB block size
+    b = bytearray(block_size)
+    file_hash = hashlib.sha256()
+    mv = memoryview(b)
+    with open(file, 'rb', buffering=0) as f:
+        while True:
+            n = f.readinto(mv)
+            if not n:
+                break
+            file_hash.update(mv[:n])
+
+    return file_hash.hexdigest()
+
+# Define the path to the llama directory (parent folder of script directory)
+llama_path = os.path.abspath(os.path.join(os.path.dirname(__file__), os.pardir))
+
+# Define the file with the list of hashes and filenames
+hash_list_file = os.path.join(llama_path, "SHA256SUMS")
+
+# Check if the hash list file exists
+if not os.path.exists(hash_list_file):
+    print(f"Hash list file not found: {hash_list_file}")
+    exit(1)
+
+# Read the hash file content and split it into an array of lines
+with open(hash_list_file, "r") as f:
+    hash_list = f.read().splitlines()
+
+# Create an array to store the results
+results = []
+
+# Loop over each line in the hash list
+for line in hash_list:
+    # Split the line into hash and filename
+    hash_value, filename = line.split("  ")
+
+    # Get the full path of the file by joining the llama path and the filename
+    file_path = os.path.join(llama_path, filename)
+
+    # Informing user of the progress of the integrity check
+    print(f"Verifying the checksum of {file_path}")
+
+    # Check if the file exists
+    if os.path.exists(file_path):
+        # Calculate the SHA256 checksum of the file using hashlib
+        file_hash = sha256sum(file_path)
+
+        # Compare the file hash with the expected hash
+        if file_hash == hash_value:
+            valid_checksum = "V"
+            file_missing = ""
+        else:
+            valid_checksum = ""
+            file_missing = ""
+    else:
+        valid_checksum = ""
+        file_missing = "X"
+
+    # Add the results to the array
+    results.append({
+        "filename": filename,
+        "valid checksum": valid_checksum,
+        "file missing": file_missing
+    })
+
+
+# Print column headers for results table
+print("\n" + "filename".ljust(40) + "valid checksum".center(20) + "file missing".center(20))
+print("-" * 80)
+
+# Output the results as a table
+for r in results:
+    print(f"{r['filename']:40} {r['valid checksum']:^20} {r['file missing']:^20}")
-- 
cgit v1.2.3
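
For readers who want to see the checksum logic touched by this patch in isolation, below is a minimal sketch of the same chunked SHA-256 verification idea, outside the llama.cpp tree. The model path and expected digest are placeholder values for illustration only; they are not taken from the repository's SHA256SUMS.

```python
import hashlib
import os

def sha256sum(path, block_size=16 * 1024 * 1024):
    # Hash the file in 16 MB blocks so large model files never have to fit in memory.
    file_hash = hashlib.sha256()
    buf = bytearray(block_size)
    view = memoryview(buf)
    with open(path, "rb", buffering=0) as f:
        while True:
            n = f.readinto(view)
            if not n:
                break
            file_hash.update(view[:n])
    return file_hash.hexdigest()

# Placeholder values for illustration only (hypothetical file and digest).
model_path = "models/7B/ggml-model-q4_0.bin"
expected_digest = "0" * 64

if not os.path.exists(model_path):
    print(f"{model_path}: file missing")
elif sha256sum(model_path) == expected_digest:
    print(f"{model_path}: checksum OK")
else:
    print(f"{model_path}: checksum mismatch")
```

Reading with `readinto()` into a reused buffer keeps memory use bounded even for multi-gigabyte model files, which is the same reason the script in this patch hashes in fixed-size blocks.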