diff --git a/2024.12.11_ollama/ollama_with_python.py b/2024.12.11_ollama/ollama_with_python.py
index 5b37a54..d07af26 100644
--- a/2024.12.11_ollama/ollama_with_python.py
+++ b/2024.12.11_ollama/ollama_with_python.py
@@ -9,8 +9,8 @@ response = ollama.chat(model="llama3.2:latest", messages=[{"role": "user", "cont
 for part in response:
     print(part['message']['content'], end='', flush=True)
 
-# 流式输出,且模型后台常驻(需要手动 ollama stop 关闭)
+# 流式输出,同时设置模型为后台常驻,需要手动 ollama stop 关闭
 import ollama
 response = ollama.chat(model="llama3.2:latest", messages=[{"role": "user", "content": "你好"}], stream=True, keep_alive=-1)
 for part in response:
-    print(part['message']['content'], end='', flush=True)
+    print(part['message']['content'], end='', flush=True)
\ No newline at end of file