
Commit bca9ad9

minor : fix whitespaces (#1302)
Parent: e2a937c

File tree: 2 files changed, +78 -79 lines


README.md  (+1 -1)
@@ -388,7 +388,7 @@ python3 .\scripts\verify-checksum-models.py
 ```
 
 - On linux or macOS it is also possible to run the following commands to verify if you have all possible latest files in your self-installed `./models` subdirectory:
-    - On Linux: `sha256sum --ignore-missing -c SHA256SUMS`
+    - On Linux: `sha256sum --ignore-missing -c SHA256SUMS`
     - on macOS: `shasum -a 256 --ignore-missing -c SHA256SUMS`
 
 ### Seminal papers and background on the models

(The removed and added versions of line 391 differ only in whitespace, per the commit title.)
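
The commands quoted above rely on the entries in SHA256SUMS: hash each listed model file and compare the result with the recorded digest, which is also what the Python script changed by this commit does. A minimal per-file sketch of that check with `hashlib` (the digest and model path here are hypothetical placeholders, and the file is hashed in chunks so large models are never held in memory whole):

```python
import hashlib

# Hypothetical values for illustration only; real pairs come from the SHA256SUMS file.
expected_digest = "0" * 64                       # placeholder SHA-256 hex digest
model_path = "./models/7B/ggml-model-q4_0.bin"   # placeholder model file path

h = hashlib.sha256()
with open(model_path, "rb") as f:
    # Hash in 1 MB chunks so multi-GB model files never sit in memory at once.
    for chunk in iter(lambda: f.read(1 << 20), b""):
        h.update(chunk)

print("OK" if h.hexdigest() == expected_digest else "FAILED")
```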

scripts/verify-checksum-models.py  (+77 -78)
@@ -1,78 +1,77 @@

The entire file is rewritten, with whitespace-only changes (per the commit title); the post-commit version:

import os
import hashlib


def sha256sum(file):
    block_size = 16 * 1024 * 1024  # 16 MB block size
    b = bytearray(block_size)
    file_hash = hashlib.sha256()
    mv = memoryview(b)
    with open(file, 'rb', buffering=0) as f:
        while True:
            n = f.readinto(mv)
            if not n:
                break
            file_hash.update(mv[:n])

    return file_hash.hexdigest()


# Define the path to the llama directory (parent folder of script directory)
llama_path = os.path.abspath(os.path.join(os.path.dirname(__file__), os.pardir))

# Define the file with the list of hashes and filenames
hash_list_file = os.path.join(llama_path, "SHA256SUMS")

# Check if the hash list file exists
if not os.path.exists(hash_list_file):
    print(f"Hash list file not found: {hash_list_file}")
    exit(1)

# Read the hash file content and split it into an array of lines
with open(hash_list_file, "r") as f:
    hash_list = f.read().splitlines()

# Create an array to store the results
results = []

# Loop over each line in the hash list
for line in hash_list:
    # Split the line into hash and filename
    hash_value, filename = line.split(" ")

    # Get the full path of the file by joining the llama path and the filename
    file_path = os.path.join(llama_path, filename)

    # Informing user of the progress of the integrity check
    print(f"Verifying the checksum of {file_path}")

    # Check if the file exists
    if os.path.exists(file_path):
        # Calculate the SHA256 checksum of the file using hashlib
        file_hash = sha256sum(file_path)

        # Compare the file hash with the expected hash
        if file_hash == hash_value:
            valid_checksum = "V"
            file_missing = ""
        else:
            valid_checksum = ""
            file_missing = ""
    else:
        valid_checksum = ""
        file_missing = "X"

    # Add the results to the array
    results.append({
        "filename": filename,
        "valid checksum": valid_checksum,
        "file missing": file_missing
    })


# Print column headers for results table
print("\n" + "filename".ljust(40) + "valid checksum".center(20) + "file missing".center(20))
print("-" * 80)

# Output the results as a table
for r in results:
    print(f"{r['filename']:40} {r['valid checksum']:^20} {r['file missing']:^20}")
