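"""Tool helpers: DuckDuckGo web search, Wikipedia lookup, and LLM calls via a
local Ollama model or the Hugging Face Inference API."""
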
import os
import config
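# config is expected to provide wikipediaSearchURL, wikipediaRetrieveURL and
# hfMoldel (the Hugging Face model id); see the call sites below.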
if "LOCALLLM" in os.environ and os.environ["LOCALLLM"]:
    from ollama import chat as OllamaChat

#################################################
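# Web search (DuckDuckGo via LangChain)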

from langchain_community.tools import DuckDuckGoSearchRun

def callWebSearch(query):
    """Run a web search for `query` and return the result text."""
    return DuckDuckGo(query)

def DuckDuckGo(query):
    """Search the web with the LangChain DuckDuckGoSearchRun tool."""
    search_tool = DuckDuckGoSearchRun()
    results = search_tool.invoke(query)
    return results

#################################################
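# Wikipedia search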

from langchain_community.tools import WikipediaQueryRun
from langchain_community.utilities import WikipediaAPIWrapper
import requests
from bs4 import BeautifulSoup

def callWikipediaSearch(query):
    """Look up `query` on Wikipedia and return the article text."""
    return callWikipediaLangchain(query)

def callWikipediaLangchain(query):
    """Query Wikipedia through the LangChain WikipediaQueryRun tool."""
    wikipedia = WikipediaQueryRun(api_wrapper=WikipediaAPIWrapper())
    response = wikipedia.run(query)
    return response

def callCustomWikipediaSearch(query):
    """Search Wikipedia through the MediaWiki API endpoints configured in
    `config` and return the plain text of the top-ranked page."""
    searchURL = config.wikipediaSearchURL + query
    response = requests.get(searchURL, timeout=60)
    response.raise_for_status()
    searchResult = response.json()
    for pageId in searchResult['query']['pages']:
        # The best-ranked search hit carries index 1.
        if searchResult['query']['pages'][pageId].get('index') == 1:
            page = searchResult['query']['pages'][pageId]['title']
            # Alternative: fetch the parsed HTML through the API, e.g.
            # response3 = requests.get('https://en.wikipedia.org/w/api.php',
            #                          params={'action': 'parse','page': page,'format': 'json'}).json()
            # raw_html3 = response3['parse']['text']['*']
            response2 = requests.get(config.wikipediaRetrieveURL + page, timeout=60)
            response2.raise_for_status()
            raw_html = response2.text
            # Strip the markup and keep only the visible text.
            soup = BeautifulSoup(raw_html, 'html.parser')
            raw_text = soup.get_text(separator=" ", strip=True)
            return raw_text
            # Alternative: return response2.text directly to keep the raw HTML,
            # or use the extracts API to get only the page intro:
            # response2 = requests.get('https://en.wikipedia.org/w/api.php',
            #                          params={'action': 'query', 'format': 'json', 'titles': page, 'prop': 'extracts', 'exintro': True, 'explaintext': True })
            # searchResult2 = response2.json()
            # for pageId2 in searchResult2['query']['pages']:
            #     if searchResult2['query']['pages'][pageId2]['index'] == 1:
            #         return searchResult2['query']['pages'][pageId2]['extract']
    return "No result found in Wikipedia. Search elsewhere!!"

#################################################
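# LLM backends: local Ollama or Hugging Face Inference API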

from huggingface_hub import InferenceClient

def callLLM(query):
    """Send `query` to the configured LLM: a local Ollama model when LOCALLLM
    is set, otherwise the Hugging Face Inference API."""
    if os.environ.get("LOCALLLM"):
        return callLocalLLM(query)
    else:
        return callHfInferenceClientLLM(query)

def callLocalLLM(query):
    """Chat with the local Ollama model named in the LOCALLLM environment variable."""
    response = OllamaChat(model=os.environ["LOCALLLM"], messages=[ { 'role': 'user', 'content': query } ])
    return response['message']['content']

def callHfInferenceClientLLM(query):
    """Chat with the Hugging Face Inference API model named in config.hfMoldel."""
    client = InferenceClient(config.hfMoldel)
    response = client.chat.completions.create(
        messages = [ {"role": "user", "content": query } ],
        stream=False, max_tokens=1024 )
    return response.choices[0].message.content

#################################################
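# Manual smoke tests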

if __name__ == "__main__":
    os.environ["LOCALLLM"] = "llama3.2"
    # from ollama import chat as OllamaChat
    # response = callLLM("What is the capital of France?")
    # print(response)
    # response = callWebSearch("who is the president of France")
    # print(response)
    # response = callHfInferenceClientLLM("What is the capital of France?")
    # print(response)
    print(callWikipediaSearch("Mercedes Sosa discography"))