Fixed the bug caused by downloaded files containing additional bytes beyond the reported size.

The issue seems to be that Hugging Face does not report the entire size of the model file in the Content-Length header, so a complete local file can be larger than the reported size.
Added an error message with instructions if the checksums don't match.
Nikita Skakun 2023-03-30 12:52:16 -07:00
parent 297ac051d9
commit d550c12a3e

@@ -35,7 +35,7 @@ def get_file(url, output_folder):
     # Check if the file has already been downloaded completely
     r = requests.head(url)
     total_size = int(r.headers.get('content-length', 0))
-    if output_path.stat().st_size == total_size:
+    if output_path.stat().st_size >= total_size:
         return
     # Otherwise, resume the download from where it left off
     headers = {'Range': f'bytes={output_path.stat().st_size}-'}
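For reference, a minimal sketch of the resumed-download path this hunk touches. Only the size check and the Range header appear in the diff; the function name, the existence checks, and the chunked write loop below are assumptions, not the script's actual implementation.

```python
import requests
from pathlib import Path

def get_file_sketch(url, output_folder):
    """Sketch only: illustrates the >= size check and Range-based resume."""
    output_path = Path(output_folder) / url.split('/')[-1]
    # Ask the server for the reported size of the file.
    r = requests.head(url)
    total_size = int(r.headers.get('content-length', 0))
    # Hugging Face may under-report the size, so a local file that is at
    # least as large as the reported total is treated as complete (>=, not ==).
    if output_path.exists() and output_path.stat().st_size >= total_size:
        return
    # Otherwise resume from the current size using an HTTP Range request.
    headers = {}
    mode = 'wb'
    if output_path.exists():
        headers = {'Range': f'bytes={output_path.stat().st_size}-'}
        mode = 'ab'
    with requests.get(url, headers=headers, stream=True) as resp:
        with open(output_path, mode) as f:
            for chunk in resp.iter_content(chunk_size=1024 * 1024):
                if chunk:
                    f.write(chunk)
```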
@@ -215,7 +215,6 @@ if __name__ == '__main__':
     print(f"Downloading the model to {output_folder}")
     download_files(links, output_folder, args.threads)
-    print('\n')
     # Validate the checksums
     validated = True
     for i in range(len(sha256)):
@@ -228,3 +227,5 @@ if __name__ == '__main__':
     if validated:
         print('[+] Validated checksums of all model files!')
+    else:
+        print('[-] Rerun the download-model.py with --clean flag')
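A hedged sketch of the checksum pass this hunk extends. The two printed messages in the if/else come from the diff; the helper names, the per-file failure message, and the assumed shape of the sha256 list as (filename, digest) pairs are illustrative only.

```python
import hashlib
from pathlib import Path

def file_sha256(path):
    # Hash the file in chunks so large model shards don't need to fit in memory.
    h = hashlib.sha256()
    with open(path, 'rb') as f:
        for chunk in iter(lambda: f.read(1024 * 1024), b''):
            h.update(chunk)
    return h.hexdigest()

def validate_files(output_folder, sha256_pairs):
    # sha256_pairs is assumed to be a list of (filename, expected_digest) tuples.
    validated = True
    for fname, expected in sha256_pairs:
        if file_sha256(Path(output_folder) / fname) != expected:
            print(f'[-] Checksum failed for {fname}')  # assumed message, not from the diff
            validated = False
    if validated:
        print('[+] Validated checksums of all model files!')
    else:
        print('[-] Rerun the download-model.py with --clean flag')
    return validated
```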