commit 8671e13661
parent 5ead9ccc0c

    update

README.md (22 lines changed)
````diff
@@ -151,7 +151,7 @@ python -m streamlit run ./Qwen_Turbo.py --theme.base dark --server.port 8501
 
 Run command:
 
 ```
-python -m streamlit run ./Hunyuan Lite.py --theme.base dark --server.port 8501
+python -m streamlit run ./Hunyuan_Lite.py --theme.base dark --server.port 8501
 ```
 
 #### 4. iFLYTEK - Spark LLM (星火大模型)
@@ -164,12 +164,28 @@ python -m streamlit run ./Hunyuan Lite.py --theme.base dark --server.port 8501
 python -m streamlit run ./星火大模型.py --theme.base dark --server.port 8501
 ```
 
-#### 5. Baidu - 01.AI's open-source model Yi-34B-Chat
+#### 5. Baidu - ERNIE_Speed_128K
 
 Get an API key from the Baidu Qianfan LLM platform (some models are paid, some are free): https://console.bce.baidu.com/qianfan/overview
 
 Run command:
 
 ```
-python -m streamlit run ./Yi_34B_Chat.py --theme.base dark --server.port 8501
+python -m streamlit run ./ERNIE_Speed_128K.py --theme.base dark --server.port 8501
+```
+
+#### 6. 01.AI (零一万物) - Yi_Spark
+
+01.AI (零一万物) LLM open platform (free quota available): https://platform.lingyiwanwu.com
+
+The openai package needs to be installed:
+
+```
+pip install openai
+```
+
+Run command:
+
+```
+python -m streamlit run ./Yi_Spark.py --theme.base dark --server.port 8501
 ```
````
````diff
@@ -48,11 +48,11 @@ for ai_response in st.session_state.ai_response:
 
 prompt_placeholder = st.chat_message("user", avatar='user')
 with st.chat_message("robot", avatar="assistant"):
-    message_placeholder_yi_34b = st.empty()
+    message_placeholder = st.empty()
 
-def response_of_yi_34b(prompt):
+def response_of_ernie_speed_128k(prompt):
     st.session_state.messages.append({'role': "user", 'content': prompt})
-    url = "https://aip.baidubce.com/rpc/2.0/ai_custom/v1/wenxinworkshop/chat/yi_34b_chat?access_token=" + get_access_token()
+    url = "https://aip.baidubce.com/rpc/2.0/ai_custom/v1/wenxinworkshop/chat/ernie-speed-128k?access_token=" + get_access_token()
     payload = json.dumps({
         "messages": st.session_state.messages,
         "top_p": top_p,
@@ -66,7 +66,7 @@ def response_of_yi_34b(prompt):
         try:
             dict_data = json.loads(line.decode("UTF-8")[5:])
             full_content += dict_data['result']
-            message_placeholder_yi_34b.markdown(full_content)
+            message_placeholder.markdown(full_content)
         except:
             pass
         if stop_button:
````
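For context on the hunk above: the `line.decode("UTF-8")[5:]` slice strips the `data:` prefix that the Baidu streaming endpoint prepends to each server-sent-events line, leaving the JSON payload for `json.loads`. A small illustration with a made-up payload (the field names mirror those used in the hunk; the exact wire format is an assumption here):

```python
import json

# One raw SSE line as it might come back from response.iter_lines() (illustrative payload).
line = b'data: {"result": "Hello", "is_end": false}'

dict_data = json.loads(line.decode("UTF-8")[5:])  # drop the leading "data:" before parsing
print(dict_data['result'])  # -> Hello
```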
````diff
@@ -81,6 +81,6 @@ if prompt:
     st.session_state.ai_response.append({"role": "user", "content": prompt, "avatar": 'user'})
     stop = st.empty()
     stop_button = stop.button('停止', key='break_response')
-    response_of_yi_34b(prompt)
+    response_of_ernie_speed_128k(prompt)
     stop.empty()
     button_clear = st.button("清空", on_click=clear_all, key='clear')
````
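The hunks above call a `get_access_token()` helper that is not shown in this diff. As a hedged sketch only: Baidu Qianfan normally issues access tokens through an OAuth2 `client_credentials` request against `https://aip.baidubce.com/oauth/2.0/token`, so such a helper could look roughly like the following (the `API_KEY`/`SECRET_KEY` placeholders are illustrative and not taken from this commit):

```python
import requests

# Illustrative placeholders; the actual script manages its own credentials.
API_KEY = "your_api_key"
SECRET_KEY = "your_secret_key"

def get_access_token():
    # Exchange the application key pair for a short-lived access token.
    url = "https://aip.baidubce.com/oauth/2.0/token"
    params = {
        "grant_type": "client_credentials",
        "client_id": API_KEY,
        "client_secret": SECRET_KEY,
    }
    return requests.post(url, params=params).json().get("access_token")
```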
模型API - 零一万物 - Yi_Spark/Yi_Spark.py (new file, 77 lines)

````diff
@@ -0,0 +1,77 @@
+"""
+This code is supported by the website: https://www.guanjihuan.com
+The newest version of this code is on the web page: https://www.guanjihuan.com/archives/38502
+"""
+
+import streamlit as st
+st.set_page_config(
+    page_title="Chat",
+    layout='wide'
+)
+
+import openai
+API_BASE = "https://api.lingyiwanwu.com/v1"
+API_KEY = "your key"
+
+with st.sidebar:
+    with st.expander('参数', expanded=True):
+        top_p = st.slider('top_p', 0.01, 1.0, step=0.01, value=0.8, key='top_p_session')
+        temperature = st.slider('temperature', 0.51, 1.0, step=0.01, value=0.85, key='temperature_session')
+        def reset_parameter():
+            st.session_state['top_p_session'] = 0.8
+            st.session_state['temperature_session'] = 0.85
+        reset_parameter_button = st.button('重置', on_click=reset_parameter)
+
+prompt = st.chat_input("在这里输入您的命令")
+
+def clear_all():
+    st.session_state.messages = []
+    st.session_state.ai_response = []
+
+if 'messages' not in st.session_state:
+    st.session_state.messages = []
+if 'ai_response' not in st.session_state:
+    st.session_state.ai_response = []
+
+for ai_response in st.session_state.ai_response:
+    with st.chat_message(ai_response["role"], avatar=ai_response.get("avatar")):
+        st.markdown(ai_response["content"])
+
+prompt_placeholder = st.chat_message("user", avatar='user')
+with st.chat_message("robot", avatar="assistant"):
+    message_placeholder = st.empty()
+
+def response_of_yi(prompt):
+    st.session_state.messages.append({'role': 'user', 'content': prompt})
+    client = openai.OpenAI(
+        api_key=API_KEY,
+        base_url=API_BASE
+    )
+    completion = client.chat.completions.create(
+        model="yi-spark",
+        messages=st.session_state.messages,
+        stream=True,
+        temperature=temperature,
+        top_p=top_p,
+    )
+    full_content = ''
+    for chunk in completion:
+        response = chunk.choices[0].delta.content or ""
+        full_content += response
+        message_placeholder.markdown(full_content)
+        if stop_button:
+            break
+    st.session_state.messages.append({'role': 'assistant',
+                                      'content': full_content})
+    st.session_state.ai_response.append({"role": "robot", "content": full_content, "avatar": "assistant"})
+    return full_content
+
+if prompt:
+    prompt_placeholder.markdown(prompt)
+    st.session_state.ai_response.append({"role": "user", "content": prompt, "avatar": 'user'})
+    stop = st.empty()
+    stop_button = stop.button('停止', key='break_response')
+    response_of_yi(prompt)
+    stop.empty()
+    button_clear = st.button("清空", on_click=clear_all, key='clear')
+
````
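Since Yi_Spark.py wraps the streaming call in Streamlit UI code, a stripped-down console sketch of the same OpenAI-compatible streaming pattern may help when testing the 01.AI endpoint on its own (same `API_BASE`/`API_KEY` placeholders as in the file above):

```python
import openai

client = openai.OpenAI(api_key="your key", base_url="https://api.lingyiwanwu.com/v1")

completion = client.chat.completions.create(
    model="yi-spark",
    messages=[{"role": "user", "content": "你好"}],
    stream=True,
)
for chunk in completion:
    # Each chunk carries an incremental delta; content can be None on the final chunk.
    print(chunk.choices[0].delta.content or "", end="", flush=True)
print()
```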