# Minimal examples of chatting with a local Ollama model from Python.
# Requires the third-party `ollama` package and a running Ollama server
# with the model pulled (`ollama pull llama3.2:latest`).
import ollama

# Direct (non-streaming) output: the full reply arrives as a single object.
response = ollama.chat(
    model="llama3.2:latest",
    messages=[{"role": "user", "content": "你好"}],
    stream=False,
)
print(response['message']['content'])

# Streaming output: the reply arrives incrementally, chunk by chunk.
# (No second `import ollama` — the module is already in scope.)
response = ollama.chat(
    model="llama3.2:latest",
    messages=[{"role": "user", "content": "你好"}],
    stream=True,
)
for part in response:
    # flush=True so each chunk appears immediately rather than being buffered
    print(part['message']['content'], end='', flush=True)
print()  # terminate the streamed output with a newline (the chunks never emit one)