Minor bug fix

oobabooga 2023-11-16 18:39:39 -08:00
parent c0233bb9d3
commit 780b00e1cf


@@ -317,7 +317,7 @@ def update_requirements(initial_installation=False):
         print(f"Uninstalled {package_name}")
 
     # Uninstall previous llama-cpp-python versions
-    run_cmd("python -m pip uninstall -y llama-cpp-python-cuda" + package_name, environment=True)
+    run_cmd("python -m pip uninstall -y llama-cpp-python-cuda", environment=True)
 
     # Make sure that API requirements are installed (temporary)
     extension_req_path = os.path.join("extensions", "openai", "requirements.txt")
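For context, the bug was a stray string concatenation: the old call appended the loop variable `package_name` to the literal `"python -m pip uninstall -y llama-cpp-python-cuda"`, producing a garbled package name for pip. The fix simply drops the concatenation. A minimal sketch of the corrected behavior, using `subprocess` in place of the script's `run_cmd` helper (which is assumed here to shell out to pip):

```python
import subprocess
import sys

def uninstall(package_name: str) -> None:
    # Rough stand-in for run_cmd("python -m pip uninstall -y <package>", environment=True)
    subprocess.run(
        [sys.executable, "-m", "pip", "uninstall", "-y", package_name],
        check=False,
    )

# Before the fix, the command string was built as
#   "python -m pip uninstall -y llama-cpp-python-cuda" + package_name
# which glued the last value of the loop variable onto the package name.
# After the fix, the package name is passed on its own:
uninstall("llama-cpp-python-cuda")
```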