A minimal chatbot using Gradio and OpenAI

Published: 2024-08-10

Mainly intended as a quick way to test model APIs.
Reference: creating-a-chatbot-fast

import os
import gradio as gr
from openai import OpenAI

client = OpenAI(
    api_key=os.getenv("DASHSCOPE_API_KEY"),  # if the environment variable is not set, replace this with your API key
    base_url="https://dashscope.aliyuncs.com/compatible-mode/v1",  # base_url of the DashScope OpenAI-compatible endpoint
)
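# Assumption: the key is supplied via the environment, e.g. before launching:
#   export DASHSCOPE_API_KEY=sk-xxx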

MODEL = "qwen-max"


def predict_stream(message, history):
    # Convert Gradio's (user, assistant) tuple history into OpenAI-style messages.
    history_openai_format = []
    for human, assistant in history:
        history_openai_format.append({"role": "user", "content": human})
        history_openai_format.append({"role": "assistant", "content": assistant})
    history_openai_format.append({"role": "user", "content": message})

    response = client.chat.completions.create(
        model=MODEL, messages=history_openai_format, temperature=1.0, stream=True
    )

    # Accumulate the streamed deltas and yield the growing reply so the UI updates in place.
    partial_message = ""
    for chunk in response:
        if chunk.choices[0].delta.content is not None:
            partial_message = partial_message + chunk.choices[0].delta.content
            yield partial_message


def predict(message, history):
    # Non-streaming variant: builds the same message list and returns the full reply at once.
    history_openai_format = []
    for human, assistant in history:
        history_openai_format.append({"role": "user", "content": human})
        history_openai_format.append({"role": "assistant", "content": assistant})
    history_openai_format.append({"role": "user", "content": message})

    response = client.chat.completions.create(
        model=MODEL, messages=history_openai_format, temperature=1.0, stream=False
    )
    return response.choices[0].message.content


# inbrowser=True opens http://127.0.0.1:8000 in the default browser automatically.
gr.ChatInterface(predict).launch(server_name="127.0.0.1", server_port=8000, inbrowser=True)
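
predict_stream is defined above but never wired up. gr.ChatInterface also accepts a generator function, so passing it in place of predict makes the reply stream token by token in the UI. A minimal sketch of that swap, assuming the same host and port as above:

# Streaming alternative: replace the launch call above with this one.
gr.ChatInterface(predict_stream).launch(server_name="127.0.0.1", server_port=8000, inbrowser=True)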
