chat.py
import gradio as gr
from langchain.chat_models import ChatOpenAI
from langchain.callbacks.streaming_stdout import StreamingStdOutCallbackHandler
from langchain.prompts import PromptTemplate
from langchain.chains import LLMChain

# Global variables: the generated deployment tutorial (Markdown) and the
# project structure outline. Both are expected to be filled in elsewhere
# before the chatbot is used.
REPORT = ''
STRUCTURE = ''

# Chatbot response function: answers follow-up questions about the tutorial.
def get_response(message, chat_history):
    global REPORT, STRUCTURE
    if REPORT == "":
        return "Please upload the project files first to generate the deployment report."

    qa_template = """
    You are an expert in AWS EC2 management.
    The following tutorial in Markdown format was generated by you to guide the user in deploying his/her application to AWS EC2.
    Now the user has some follow-up questions about the tutorial.
    Answer the question based on the tutorial contents and the conversation history.
    Be sure to give a concise answer.
    The ultimate goal is to help the user deploy the code successfully on AWS EC2.
    tutorial contents: {tutorial}
    application outline: {outline}
    conversation_history: {history}
    """

    # Streaming chat model; tokens are echoed to stdout by the callback handler.
    LLM = ChatOpenAI(model_name="gpt-3.5-turbo",
                     openai_api_key="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",
                     streaming=True,
                     callbacks=[StreamingStdOutCallbackHandler()],
                     temperature=0.7,
                     )
    qa_prompt = PromptTemplate(template=qa_template,
                               input_variables=['tutorial', 'outline', 'history'])
    qa_chain = LLMChain(llm=LLM, prompt=qa_prompt)

    # Flatten the chat history into a single string for the prompt.
    query_new = ""
    for query, answer in chat_history:
        query_new += f"user: {query}, bot: {answer}\n"
    query_new += f"user: {message}, bot:"

    response = qa_chain({"tutorial": REPORT, "outline": STRUCTURE, "history": query_new})['text']
    return response
# Gradio UI: a chatbot window with a textbox for follow-up questions.
with gr.Blocks() as demo:
    chatbot = gr.Chatbot()
    msg = gr.Textbox()

    def respond(message, chat_history):
        bot_message = get_response(message, chat_history)
        chat_history.append((message, bot_message))
        return "", chat_history

    msg.submit(respond, [msg, chatbot], [msg, chatbot])
# demo.launch(share=True)
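
# --- Example wiring (editor's sketch; not part of the original file) ---------
# REPORT and STRUCTURE are never assigned above, so get_response() always
# replies with the "please upload the project files first" message. The code
# below is one assumed way to populate them from an upload widget and launch
# the app. The report/outline generation here is only a placeholder; the
# project's real generation logic would replace it.

def handle_upload(files):
    """Placeholder handler: stores file names as a stand-in report and outline."""
    global REPORT, STRUCTURE
    names = [f.name for f in files] if files else []
    REPORT = "Uploaded files:\n" + "\n".join(names)   # placeholder for the generated tutorial
    STRUCTURE = "\n".join(names)                      # placeholder for the application outline
    return "Files received. You can now ask follow-up questions."

# Re-open the Blocks context to add the upload widget alongside the chatbot.
with demo:
    uploader = gr.File(file_count="multiple", label="Project files")
    status = gr.Textbox(label="Status", interactive=False)
    uploader.change(handle_upload, uploader, status)

if __name__ == "__main__":
    demo.launch(share=True)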