import os
import gradio as gr
import requests
from http import HTTPStatus
import json
from langchain.llms import Tongyi
from langchain import hub
from langchain_community.tools.tavily_search import TavilySearchResults
from langchain.tools import tool
from langchain.embeddings import TensorflowHubEmbeddings
from pinecone import Pinecone, ServerlessSpec
from langchain.vectorstores import Pinecone as Pinecone_VectorStore
from langchain.tools.retriever import create_retriever_tool
from langchain.agents import AgentExecutor,create_react_agent

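# Tavily API key for the web-search tool; the weather tool below wraps the Seniverse API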
os.environ['TAVILY_API_KEY'] = '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'
@tool
def tqyb(query: str) -> str:
    """Weather forecast API. Example: query=北京"""
    url = f"https://api.seniverse.com/v3/weather/now.json?key=SWtPLxs4A2GhenWC-&location={query}&language=zh-Hans&unit=c"
    response = requests.get(url)
    # Check whether the request succeeded
    if response.status_code == HTTPStatus.OK:
        # The API returns JSON; serialize it so the tool output is a string
        return json.dumps(response.json(), ensure_ascii=False)
    else:
        return f"Request failed, status code: {response.status_code}"


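# Tongyi (qwen-72b-chat) LLM, the ReAct prompt from LangChain Hub, and the Tavily search tool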
llm = Tongyi(dashscope_api_key="sk-78c45d761ed04af2b965b43cd522108b", model="qwen-72b-chat")
prompt = hub.pull("hwchase17/react")
search = TavilySearchResults(max_results=1)

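# Pinecone vector store exposed as a retriever tool for Shuangcheng district information (RAG)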
embeddings = TensorflowHubEmbeddings()
pc = Pinecone(api_key='3538cd3c-eca8-4c61-9463-759f5ea65b10')
index = pc.Index("myindex")
vectorstore = Pinecone_VectorStore(index, embeddings.embed_query, "text")
db = vectorstore.as_retriever()
retriever_tool = create_retriever_tool(
    db,
    "shuangcheng_search",
    "Retrieval tool for information about Shuangcheng district. If the question concerns Shuangcheng district affairs, you must use this tool!",
)

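# Assemble the ReAct agent from the three tools and wrap it in an executor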
tools = [search, tqyb, retriever_tool]
agent = create_react_agent(llm, tools, prompt)
agent_executor = AgentExecutor(agent=agent, tools=tools, verbose=True)

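# Gradio callback: run the agent on the user's question and return its final answer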
async def predict(question):
    que = {"input": question}
    res = agent_executor.invoke(que)
    if res:
        return res["output"]
    else:
        return "Sorry, something went wrong. Please contact me on WeChat: 13603634456"

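# Minimal Gradio UI: textbox in, textbox out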
gr.Interface(
    predict, inputs="textbox",
    outputs="textbox",
    title="Custom AI Expert BOT",
    description="This is a custom AI expert BOT: type in a question and the AI will answer it.\nThree example tools are currently provided:\n1. Weather forecast (function-calling API)\n2. Shuangcheng district information retrieval (RAG)\n3. Search engine").launch()