minor : fix whitespaces (#1302)
parent: e2a937ca6a
commit: bca9ad938a
@@ -388,7 +388,7 @@ python3 .\scripts\verify-checksum-models.py

- On Linux or macOS it is also possible to run the following commands to verify if you have all possible latest files in your self-installed `./models` subdirectory (a Python sketch of the same check appears after this list):
    - On Linux: `sha256sum --ignore-missing -c SHA256SUMS`
    - On macOS: `shasum -a 256 --ignore-missing -c SHA256SUMS`
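For systems where neither tool is available, the following is a minimal sketch of the same per-file check; the function name, digest, and model path are placeholders, and the repository's own `verify-checksum-models.py` (shown in the hunk below) automates this for every entry in `SHA256SUMS`:

```python
import hashlib

def sha256_matches(path: str, expected_hex: str) -> bool:
    """Return True if the SHA-256 digest of the file at `path` equals `expected_hex`."""
    file_hash = hashlib.sha256()
    with open(path, "rb") as f:
        # Hash in 1 MiB chunks so large model files never have to fit in memory.
        for chunk in iter(lambda: f.read(1 << 20), b""):
            file_hash.update(chunk)
    return file_hash.hexdigest() == expected_hex

# Placeholder values, for illustration only:
# sha256_matches("models/7B/ggml-model-q4_0.bin", "0" * 64)
```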
### Seminal papers and background on the models
scripts/verify-checksum-models.py
@@ -1,78 +1,77 @@
```python
import os
import hashlib


def sha256sum(file):
    block_size = 16 * 1024 * 1024  # 16 MB block size
    b = bytearray(block_size)
    file_hash = hashlib.sha256()
    mv = memoryview(b)
    with open(file, 'rb', buffering=0) as f:
        while True:
            n = f.readinto(mv)
            if not n:
                break
            file_hash.update(mv[:n])

    return file_hash.hexdigest()


# Define the path to the llama directory (parent folder of script directory)
llama_path = os.path.abspath(os.path.join(os.path.dirname(__file__), os.pardir))

# Define the file with the list of hashes and filenames
hash_list_file = os.path.join(llama_path, "SHA256SUMS")

# Check if the hash list file exists
if not os.path.exists(hash_list_file):
    print(f"Hash list file not found: {hash_list_file}")
    exit(1)

# Read the hash file content and split it into an array of lines
with open(hash_list_file, "r") as f:
    hash_list = f.read().splitlines()

# Create an array to store the results
results = []

# Loop over each line in the hash list
for line in hash_list:
    # Split the line into hash and filename (separated by two spaces, as in sha256sum output)
    hash_value, filename = line.split("  ")

    # Get the full path of the file by joining the llama path and the filename
    file_path = os.path.join(llama_path, filename)

    # Informing user of the progress of the integrity check
    print(f"Verifying the checksum of {file_path}")

    # Check if the file exists
    if os.path.exists(file_path):
        # Calculate the SHA256 checksum of the file using hashlib
        file_hash = sha256sum(file_path)

        # Compare the file hash with the expected hash
        if file_hash == hash_value:
            valid_checksum = "V"
            file_missing = ""
        else:
            valid_checksum = ""
            file_missing = ""
    else:
        valid_checksum = ""
        file_missing = "X"

    # Add the results to the array
    results.append({
        "filename": filename,
        "valid checksum": valid_checksum,
        "file missing": file_missing
    })

# Print column headers for results table
print("\n" + "filename".ljust(40) + "valid checksum".center(20) + "file missing".center(20))
print("-" * 80)

# Output the results as a table
for r in results:
    print(f"{r['filename']:40} {r['valid checksum']:^20} {r['file missing']:^20}")
```
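The parsing in the loop above assumes each `SHA256SUMS` line is the digest, two spaces, then the model path relative to the repository root, i.e. the format `sha256sum` itself emits. A minimal sketch of that split with placeholder values (the digest and path below are not real entries):

```python
# One placeholder SHA256SUMS entry in sha256sum's output format:
# <64 hex characters><two spaces><path relative to the repo root>
line = "0" * 64 + "  " + "models/7B/ggml-model-q4_0.bin"

hash_value, filename = line.split("  ")
print(hash_value)  # 64-character hex digest (all zeros here)
print(filename)    # models/7B/ggml-model-q4_0.bin
```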