
Commit

ollama patch
binary-husky committed Dec 18, 2024
1 parent f43ef90 commit 72b2ce9
Showing 1 changed file with 4 additions and 2 deletions.
request_llms/bridge_ollama.py (6 changes: 4 additions & 2 deletions)
@@ -75,7 +75,7 @@ def predict_no_ui_long_connection(inputs, llm_kwargs, history=[], sys_prompt="",
             # make a POST request to the API endpoint, stream=False
             from .bridge_all import model_info
             endpoint = model_info[llm_kwargs['llm_model']]['endpoint']
-            response = requests.post(endpoint, headers=headers, proxies=proxies,
+            response = requests.post(endpoint, headers=headers, proxies=None,
                                      json=payload, stream=True, timeout=TIMEOUT_SECONDS); break
         except requests.exceptions.ReadTimeout as e:
             retry += 1
@@ -152,10 +152,12 @@ def predict(inputs, llm_kwargs, plugin_kwargs, chatbot, history=[], system_promp
     history.append(inputs); history.append("")
 
     retry = 0
+    if proxies is not None:
+        logger.error("Ollama不会使用代理服务器, 忽略了proxies的设置。")
     while True:
         try:
             # make a POST request to the API endpoint, stream=True
-            response = requests.post(endpoint, headers=headers, proxies=proxies,
+            response = requests.post(endpoint, headers=headers, proxies=None,
                                      json=payload, stream=True, timeout=TIMEOUT_SECONDS);break
         except:
             retry += 1
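The change is the same in both request paths: the globally configured proxies are no longer forwarded to requests.post (proxies=None is passed instead), and predict now logs an error, roughly "Ollama will not use a proxy server; the proxies setting is ignored", when a proxy is configured. The snippet below is a minimal standalone sketch of that pattern, not part of the commit; the endpoint URL, payload shape, and use of the standard logging module are illustrative assumptions.

    import logging
    import requests

    logger = logging.getLogger(__name__)

    proxies = {"http": "http://127.0.0.1:7890", "https": "http://127.0.0.1:7890"}  # hypothetical global proxy config
    endpoint = "http://localhost:11434/api/chat"                                   # assumed default Ollama chat endpoint
    payload = {"model": "llama3", "stream": True,
               "messages": [{"role": "user", "content": "hello"}]}                 # assumed Ollama-style payload

    if proxies is not None:
        # mirror the patch: warn that the proxy setting is ignored for Ollama
        logger.error("Ollama will not use a proxy server; the proxies setting is ignored.")

    # proxies=None: the configured proxy dict is deliberately not passed to this request
    response = requests.post(endpoint, headers={"Content-Type": "application/json"},
                             proxies=None, json=payload, stream=True, timeout=60)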
