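"""Interactive command-line chat: builds a Chat_QA_chain_self over the
sanguo_characters knowledge base and answers user questions in a loop."""
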
import os
import sys

# Ensure the project root is on sys.path so the qa_chain package can be
# imported when this script is run directly from its subdirectory.
current_dir = os.path.dirname(os.path.abspath(__file__))
project_root = os.path.dirname(current_dir)
if project_root not in sys.path:
    sys.path.append(project_root)

from dotenv import load_dotenv

from qa_chain.Chat_QA_chain_self import Chat_QA_chain_self

# Load environment variables (e.g. ali_api_key) from a local .env file.
load_dotenv()

# Shared conversation history, kept as a list of (question, answer) pairs.
chat_history: list = []
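
# Optional sanity check (an illustrative addition, not part of the original
# script): warn early if the key read below via os.getenv("ali_api_key") is
# missing from the environment or the .env file.
if not os.getenv("ali_api_key"):
    print("Warning: ali_api_key is not set; the QA chain may fail to authenticate.")
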
def chat_with_ai():
    """Run an interactive question-answering loop against the knowledge base."""
    print("Initializing the AI assistant...")
    # Configure the QA chain: model, retrieval top_k, Chroma vector store path,
    # knowledge base path, and embedding settings.
    chat_qa = Chat_QA_chain_self(
        model="qwen-max",
        temperature=0.7,
        top_k=4,
        chat_history=chat_history,
        persist_path="./vector_db/chroma_sanguo",
        file_path="./knowledge_db/sanguo_characters",
        api_key=os.getenv("ali_api_key"),
        embedding="m3e",
        embedding_key=None
    )
    print("AI assistant initialized. You can start chatting! (Type '退出', 'exit' or 'quit' to end.)")

    while True:
        user_input = input("\nYou: ")
        if user_input.lower() in ["退出", "exit", "quit"]:
            break

        # answer() is expected to return the updated (question, answer) history;
        # mirror it back into the shared chat_history list.
        history = chat_qa.answer(user_input, chat_history=chat_history)
        chat_history[:] = history

        print("\nFull conversation history:")
        for i, (q, a) in enumerate(history, 1):
            print(f"{i}. You: {q}")
            print(f"   AI: {a}\n")
        print("\nLatest AI reply:", history[-1][1])
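
# Note: persist_path and file_path above are relative, so they resolve against
# the current working directory. Run the script from the directory that
# contains vector_db/ and knowledge_db/ (or adjust the paths accordingly).
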
if __name__ == "__main__":
    chat_with_ai()