"""Minimal examples of chatting with a local Ollama server from Python.

Requires the third-party `ollama` package and a running Ollama instance
with the `llama3.2:latest` model already pulled.

Demonstrates both response modes of `ollama.chat`:
  1. non-streaming (`stream=False`): the full reply arrives as one object;
  2. streaming (`stream=True`): the reply arrives as an iterable of chunks.
"""
import ollama

# Hoisted so the model is configured in exactly one place.
MODEL = "llama3.2:latest"

# --- Non-streaming: block until the complete reply is available. ---
response = ollama.chat(
    model=MODEL,
    messages=[{"role": "user", "content": "你好"}],
    stream=False,
)
print(response['message']['content'])

# --- Streaming: print each chunk as the model generates it. ---
response = ollama.chat(
    model=MODEL,
    messages=[{"role": "user", "content": "你好"}],
    stream=True,
)
for part in response:
    # flush=True so partial tokens appear immediately, not on buffer fill.
    print(part['message']['content'], end='', flush=True)
print()  # terminate the streamed line so the shell prompt isn't glued to it