diff options
author | Georgi Gerganov <ggerganov@gmail.com> | 2023-05-03 20:09:42 +0300 |
---|---|---|
committer | GitHub <noreply@github.com> | 2023-05-03 20:09:42 +0300 |
commit | bca9ad938a2a43621cf406d993b755cc91728dd5 (patch) | |
tree | f0c4d3c988a3e2015508b50c3f3afbf170d5d437 | |
parent | e2a937ca6abadc7e01e139db31e6db9dce16e3e9 (diff) |
minor : fix whitespaces (#1302)
-rw-r--r-- | README.md | 2 | ||||
-rw-r--r-- | scripts/verify-checksum-models.py | 155 |
2 files changed, 78 insertions, 79 deletions
@@ -388,7 +388,7 @@ python3 .\scripts\verify-checksum-models.py ``` - On linux or macOS it is also possible to run the following commands to verify if you have all possible latest files in your self-installed `./models` subdirectory: - - On Linux: `sha256sum --ignore-missing -c SHA256SUMS` + - On Linux: `sha256sum --ignore-missing -c SHA256SUMS` - on macOS: `shasum -a 256 --ignore-missing -c SHA256SUMS` ### Seminal papers and background on the models diff --git a/scripts/verify-checksum-models.py b/scripts/verify-checksum-models.py index 1f1b3d2..2ce5728 100644 --- a/scripts/verify-checksum-models.py +++ b/scripts/verify-checksum-models.py @@ -1,78 +1,77 @@ -import os
-import hashlib
-
-def sha256sum(file):
- block_size = 16 * 1024 * 1024 # 16 MB block size
- b = bytearray(block_size)
- file_hash = hashlib.sha256()
- mv = memoryview(b)
- with open(file, 'rb', buffering=0) as f:
- while True:
- n = f.readinto(mv)
- if not n:
- break
- file_hash.update(mv[:n])
-
- return file_hash.hexdigest()
-
-# Define the path to the llama directory (parent folder of script directory)
-llama_path = os.path.abspath(os.path.join(os.path.dirname(__file__), os.pardir))
-
-# Define the file with the list of hashes and filenames
-hash_list_file = os.path.join(llama_path, "SHA256SUMS")
-
-# Check if the hash list file exists
-if not os.path.exists(hash_list_file):
- print(f"Hash list file not found: {hash_list_file}")
- exit(1)
-
-# Read the hash file content and split it into an array of lines
-with open(hash_list_file, "r") as f:
- hash_list = f.read().splitlines()
-
-# Create an array to store the results
-results = []
-
-# Loop over each line in the hash list
-for line in hash_list:
- # Split the line into hash and filename
- hash_value, filename = line.split(" ")
-
- # Get the full path of the file by joining the llama path and the filename
- file_path = os.path.join(llama_path, filename)
-
- # Informing user of the progress of the integrity check
- print(f"Verifying the checksum of {file_path}")
-
- # Check if the file exists
- if os.path.exists(file_path):
- # Calculate the SHA256 checksum of the file using hashlib
- file_hash = sha256sum(file_path)
-
- # Compare the file hash with the expected hash
- if file_hash == hash_value:
- valid_checksum = "V"
- file_missing = ""
- else:
- valid_checksum = ""
- file_missing = ""
- else:
- valid_checksum = ""
- file_missing = "X"
-
- # Add the results to the array
- results.append({
- "filename": filename,
- "valid checksum": valid_checksum,
- "file missing": file_missing
- })
-
-
-# Print column headers for results table
-print("\n" + "filename".ljust(40) + "valid checksum".center(20) + "file missing".center(20))
-print("-" * 80)
-
-# Output the results as a table
-for r in results:
- print(f"{r['filename']:40} {r['valid checksum']:^20} {r['file missing']:^20}")
-
+import os +import hashlib + +def sha256sum(file): + block_size = 16 * 1024 * 1024 # 16 MB block size + b = bytearray(block_size) + file_hash = hashlib.sha256() + mv = memoryview(b) + with open(file, 'rb', buffering=0) as f: + while True: + n = f.readinto(mv) + if not n: + break + file_hash.update(mv[:n]) + + return file_hash.hexdigest() + +# Define the path to the llama directory (parent folder of script directory) +llama_path = os.path.abspath(os.path.join(os.path.dirname(__file__), os.pardir)) + +# Define the file with the list of hashes and filenames +hash_list_file = os.path.join(llama_path, "SHA256SUMS") + +# Check if the hash list file exists +if not os.path.exists(hash_list_file): + print(f"Hash list file not found: {hash_list_file}") + exit(1) + +# Read the hash file content and split it into an array of lines +with open(hash_list_file, "r") as f: + hash_list = f.read().splitlines() + +# Create an array to store the results +results = [] + +# Loop over each line in the hash list +for line in hash_list: + # Split the line into hash and filename + hash_value, filename = line.split(" ") + + # Get the full path of the file by joining the llama path and the filename + file_path = os.path.join(llama_path, filename) + + # Informing user of the progress of the integrity check + print(f"Verifying the checksum of {file_path}") + + # Check if the file exists + if os.path.exists(file_path): + # Calculate the SHA256 checksum of the file using hashlib + file_hash = sha256sum(file_path) + + # Compare the file hash with the expected hash + if file_hash == hash_value: + valid_checksum = "V" + file_missing = "" + else: + valid_checksum = "" + file_missing = "" + else: + valid_checksum = "" + file_missing = "X" + + # Add the results to the array + results.append({ + "filename": filename, + "valid checksum": valid_checksum, + "file missing": file_missing + }) + + +# Print column headers for results table +print("\n" + "filename".ljust(40) + "valid checksum".center(20) + "file missing".center(20)) +print("-" * 80) + +# Output the results as a table +for r in results: + print(f"{r['filename']:40} {r['valid checksum']:^20} {r['file missing']:^20}") |