From 65ae0be5188b837cb3fd0e662c1f80dc39e2f50b Mon Sep 17 00:00:00 2001
From: guanjihuan
Date: Sat, 22 Feb 2025 00:41:33 +0800
Subject: [PATCH] 0.1.145

---
 PyPI/setup.cfg                   |  2 +-
 PyPI/src/guan.egg-info/PKG-INFO  |  2 +-
 PyPI/src/guan/data_processing.py | 84 +++++++++-----------------------
 3 files changed, 26 insertions(+), 62 deletions(-)

diff --git a/PyPI/setup.cfg b/PyPI/setup.cfg
index d5b6410..db11fec 100644
--- a/PyPI/setup.cfg
+++ b/PyPI/setup.cfg
@@ -1,7 +1,7 @@
 [metadata]
 # replace with your username:
 name = guan
-version = 0.1.144
+version = 0.1.145
 author = guanjihuan
 author_email = guanjihuan@163.com
 description = An open source python package
diff --git a/PyPI/src/guan.egg-info/PKG-INFO b/PyPI/src/guan.egg-info/PKG-INFO
index 8df5b3b..3cdd12b 100644
--- a/PyPI/src/guan.egg-info/PKG-INFO
+++ b/PyPI/src/guan.egg-info/PKG-INFO
@@ -1,6 +1,6 @@
 Metadata-Version: 2.2
 Name: guan
-Version: 0.1.144
+Version: 0.1.145
 Summary: An open source python package
 Home-page: https://py.guanjihuan.com
 Author: guanjihuan
diff --git a/PyPI/src/guan/data_processing.py b/PyPI/src/guan/data_processing.py
index 71463c1..a60c6e8 100644
--- a/PyPI/src/guan/data_processing.py
+++ b/PyPI/src/guan/data_processing.py
@@ -1,73 +1,37 @@
 # Module: data_processing
 
 # AI 对话
-def chat(prompt='你好', stream=1, model=1):
-    import socket
-    import json
-    import time
-    import guan
-    with socket.socket(socket.AF_INET, socket.SOCK_STREAM) as client_socket:
-        client_socket.settimeout(30)
-        client_socket.connect(('ollama.guanjihuan.com', 12301))
-        split_text_list = guan.split_text(prompt, width=100)
-        message_times = len(split_text_list)
-        if message_times == 1 or message_times == 0:
-            message = {
-                'server': "ollama.guanjihuan.com",
-                'prompt': prompt,
-                'model': model,
-            }
-            send_message = json.dumps(message)
-            client_socket.send(send_message.encode('utf-8'))
-        else:
-            end_message = 0
-            for i0 in range(message_times):
-                if i0 == message_times-1:
-                    end_message = 1
-                prompt_0 = split_text_list[i0]
-                message = {
-                    'server': "ollama.guanjihuan.com",
-                    'prompt': prompt_0,
-                    'model': model,
-                    'end_message': end_message,
-                }
-                send_message = json.dumps(message)
-                client_socket.send(send_message.encode('utf-8'))
-                time.sleep(0.2)
-        if stream == 1:
-            print('\n--- Start Chat Stream Message ---\n')
-        response = ''
-        while True:
-            if prompt == '':
-                break
-            try:
-                data = client_socket.recv(1024)
-                if data != b'':
-                    response_data = data.decode()
-                    response_dict = json.loads(response_data)
-                    stream_response = response_dict['stream_response']
-                    response += stream_response
-                    end_message = response_dict['end_message']
-                    if end_message == 1:
-                        break
-                    else:
-                        if stream == 1:
-                            print(stream_response, end='', flush=True)
-            except:
-                break
-        # client_socket.close()
-    if stream == 1:
-        print('\n\n--- End Chat Stream Message ---\n')
+def chat(prompt='你好', model=1, stream=1):
+    import requests
+    url = "http://api.guanjihuan.com/chat"
+    data = {
+        "prompt": prompt,
+        "model": model,
+    }
+    if stream == 1:
+        print('\n--- Start Chat Stream Message ---\n')
+    requests_response = requests.post(url, json=data, stream=True)
+    response = ''
+    if requests_response.status_code == 200:
+        for line in requests_response.iter_lines():
+            if line:
+                if stream == 1:
+                    print(line.decode('utf-8'), end='', flush=True)
+                response += line.decode('utf-8')
+    else:
+        pass
+    if stream == 1:
+        print('\n\n--- End Chat Stream Message ---\n')
     return response
 
 # 加上函数代码的 AI 对话
-def chat_with_function_code(function_name, prompt='', stream=1, model=1):
+def chat_with_function_code(function_name, prompt='', model=1, stream=1):
     import guan
     function_source = guan.get_source(function_name)
     if prompt == '':
-        response = guan.chat(prompt=function_source, stream=stream, model=model)
+        response = guan.chat(prompt=function_source, model=model, stream=stream)
     else:
-        response = guan.chat(prompt=function_source+'\n\n'+prompt, stream=stream, model=model)
+        response = guan.chat(prompt=function_source+'\n\n'+prompt, model=model, stream=stream)
     return response
 
 # 机器人自动对话
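Usage note (a sketch, not part of the patch): with guan 0.1.145 installed, the reworked chat() posts the prompt to http://api.guanjihuan.com/chat and streams the reply line by line over HTTP instead of the old raw-socket protocol on ollama.guanjihuan.com:12301. The call below assumes that endpoint is reachable; model=1 is simply the default code used in the patch, and which backend model it selects is not documented here.

    import guan

    # Stream the reply to the terminal and also collect it as a string.
    reply = guan.chat(prompt='Briefly introduce the guan package.', model=1, stream=1)

    # Or disable console streaming (stream=0) and handle the returned text yourself.
    reply = guan.chat(prompt='Briefly introduce the guan package.', model=1, stream=0)
    print(reply)

Note that in the new implementation a non-200 status code is silently ignored, so chat() then returns an empty string rather than raising an error.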