0.1.145
parent e171790a15
commit 65ae0be518
@@ -1,7 +1,7 @@
 [metadata]
 # replace with your username:
 name = guan
-version = 0.1.144
+version = 0.1.145
 author = guanjihuan
 author_email = guanjihuan@163.com
 description = An open source python package
|
@@ -1,6 +1,6 @@
 Metadata-Version: 2.2
 Name: guan
-Version: 0.1.144
+Version: 0.1.145
 Summary: An open source python package
 Home-page: https://py.guanjihuan.com
 Author: guanjihuan
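Both hunks above record the same version bump, once in the package's setup metadata and once in the generated PKG-INFO. As a minimal sketch (assuming the new release has been installed, e.g. with pip install --upgrade guan), the bump can be confirmed from Python:

    # Report the installed distribution version; it should match the bump above.
    from importlib.metadata import version
    print(version("guan"))  # expected: 0.1.145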
@@ -1,73 +1,37 @@
 # Module: data_processing

 # AI chat
-def chat(prompt='你好', stream=1, model=1):
-    import socket
-    import json
-    import time
-    import guan
-    with socket.socket(socket.AF_INET, socket.SOCK_STREAM) as client_socket:
-        client_socket.settimeout(30)
-        client_socket.connect(('ollama.guanjihuan.com', 12301))
-        split_text_list = guan.split_text(prompt, width=100)
-        message_times = len(split_text_list)
-        if message_times == 1 or message_times == 0:
-            message = {
-                'server': "ollama.guanjihuan.com",
-                'prompt': prompt,
-                'model': model,
-            }
-            send_message = json.dumps(message)
-            client_socket.send(send_message.encode('utf-8'))
-        else:
-            end_message = 0
-            for i0 in range(message_times):
-                if i0 == message_times-1:
-                    end_message = 1
-                prompt_0 = split_text_list[i0]
-                message = {
-                    'server': "ollama.guanjihuan.com",
-                    'prompt': prompt_0,
-                    'model': model,
-                    'end_message': end_message,
-                }
-                send_message = json.dumps(message)
-                client_socket.send(send_message.encode('utf-8'))
-                time.sleep(0.2)
-        if stream == 1:
-            print('\n--- Start Chat Stream Message ---\n')
-        response = ''
-        while True:
-            if prompt == '':
-                break
-            try:
-                data = client_socket.recv(1024)
-                if data != b'':
-                    response_data = data.decode()
-                    response_dict = json.loads(response_data)
-                    stream_response = response_dict['stream_response']
-                    response += stream_response
-                    end_message = response_dict['end_message']
-                    if end_message == 1:
-                        break
-                    else:
-                        if stream == 1:
-                            print(stream_response, end='', flush=True)
-            except:
-                break
-        # client_socket.close()
-    if stream == 1:
-        print('\n\n--- End Chat Stream Message ---\n')
+def chat(prompt='你好', model=1, stream=1):
+    import requests
+    url = "http://api.guanjihuan.com/chat"
+    data = {
+        "prompt": prompt,
+        "model": model,
+    }
+    if stream == 1:
+        print('\n--- Start Chat Stream Message ---\n')
+    requests_response = requests.post(url, json=data, stream=True)
+    response = ''
+    if requests_response.status_code == 200:
+        for line in requests_response.iter_lines():
+            if line:
+                if stream == 1:
+                    print(line.decode('utf-8'), end='', flush=True)
+                response += line.decode('utf-8')
+    else:
+        pass
+    if stream == 1:
+        print('\n\n--- End Chat Stream Message ---\n')
     return response

 # AI chat that also sends a function's source code
-def chat_with_function_code(function_name, prompt='', stream=1, model=1):
+def chat_with_function_code(function_name, prompt='', model=1, stream=1):
     import guan
     function_source = guan.get_source(function_name)
     if prompt == '':
-        response = guan.chat(prompt=function_source, stream=stream, model=model)
+        response = guan.chat(prompt=function_source, model=model, stream=stream)
     else:
-        response = guan.chat(prompt=function_source+'\n\n'+prompt, stream=stream, model=model)
+        response = guan.chat(prompt=function_source+'\n\n'+prompt, model=model, stream=stream)
     return response

 # Automated bot conversation
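For reference, a minimal usage sketch of the rewritten HTTP-based chat() shown in the hunk above. It assumes guan 0.1.145 is installed and that the http://api.guanjihuan.com/chat endpoint is reachable; the prompt values are placeholders.

    import guan

    # Streaming call: prints the reply between the Start/End markers as it
    # arrives and also returns the accumulated text.
    reply = guan.chat(prompt='你好', model=1, stream=1)

    # Non-streaming call: nothing is printed; only the full reply is returned.
    reply = guan.chat(prompt='你好', model=1, stream=0)
    print(reply)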