|
|
|
import os |
|
import config |
|
if "LOCALLLM" in os.environ and os.environ["LOCALLLM"]: |
|
from ollama import chat as OllamaChat |
|
|
|
|
|
|
|
from langchain_community.tools import DuckDuckGoSearchRun |
|
|
|
def callWebSearch(query):
    """Dispatch *query* to the configured web-search backend.

    Currently delegates to the DuckDuckGo helper below.
    """
    results = DuckDuckGo(query)
    return results
|
|
|
def DuckDuckGo(query):
    """Run *query* through LangChain's DuckDuckGo search tool.

    Returns the raw result text produced by the tool.
    """
    return DuckDuckGoSearchRun().invoke(query)
|
|
|
|
|
|
|
from langchain_community.tools import WikipediaQueryRun |
|
from langchain_community.utilities import WikipediaAPIWrapper |
|
import requests |
|
from bs4 import BeautifulSoup |
|
|
|
def callWikipediaSearch(query):
    """Search Wikipedia for *query*.

    Delegates to the LangChain-based implementation; the custom
    HTTP implementation below is an alternative backend.
    """
    answer = callWikipediaLangchain(query)
    return answer
|
|
|
def callWikipediaLangchain(query):
    """Query Wikipedia through LangChain's WikipediaQueryRun tool."""
    tool = WikipediaQueryRun(api_wrapper=WikipediaAPIWrapper())
    return tool.run(query)
|
|
|
def callCustomWikipediaSearch(query):
    """Search Wikipedia via its HTTP API and return the top page as plain text.

    Queries ``config.wikipediaSearchURL`` for candidate pages, picks the
    top-ranked hit (the API marks it with ``index == 1``), fetches its HTML
    via ``config.wikipediaRetrieveURL``, and strips it to plain text.

    Returns:
        The page's plain text, or a fallback message string when no
        top-ranked result exists.

    Raises:
        requests.HTTPError: if either HTTP request returns an error status.
    """
    searchURL = config.wikipediaSearchURL + query
    response = requests.get(searchURL, timeout=60)
    response.raise_for_status()
    searchResult = response.json()

    for pageInfo in searchResult['query']['pages'].values():
        # .get() guards against entries that carry no 'index' key at all;
        # the original triple-indexing raised KeyError in that case.
        if pageInfo.get('index') == 1:
            page = pageInfo['title']
            # Fix: this request previously had no timeout and could hang
            # forever; use the same 60s budget as the search request.
            response2 = requests.get(config.wikipediaRetrieveURL + page, timeout=60)
            response2.raise_for_status()
            soup = BeautifulSoup(response2.text, 'html.parser')
            # Return as soon as the top hit is found — also fixes the case
            # where 'page' could be referenced without ever being assigned.
            return soup.get_text(separator=" ", strip=True)

    return "No result found in wikipedia. Search elsewhere!!"
|
|
|
|
|
|
|
from huggingface_hub import InferenceClient |
|
|
|
def callLLM(query):
    """Route *query* to a local Ollama model when $LOCALLLM names one,
    otherwise to the Hugging Face inference endpoint.

    Returns the model's reply text.
    """
    # Fix: match the import guard at the top of the file, which only
    # imports ollama when LOCALLLM is both set AND non-empty. Checking
    # mere membership here raised NameError on OllamaChat when
    # LOCALLLM was set to an empty string.
    if os.environ.get("LOCALLLM"):
        return callLocalLLM(query)
    return callHfInferenceClientLLM(query)
|
|
|
def callLocalLLM(query):
    """Send *query* to the local Ollama model named by $LOCALLLM and
    return its reply text."""
    messages = [{'role': 'user', 'content': query}]
    reply = OllamaChat(model=os.environ["LOCALLLM"], messages=messages)
    return reply['message']['content']
|
|
|
def callHfInferenceClientLLM(query):
    """Send *query* to the Hugging Face Inference API and return the reply.

    NOTE(review): the config attribute is spelled ``hfMoldel`` — likely a
    typo in config.py, but it must be fixed there first; renaming it here
    alone would break the attribute lookup.
    """
    messages = [{"role": "user", "content": query}]
    client = InferenceClient(config.hfMoldel)
    completion = client.chat.completions.create(
        messages=messages,
        stream=False,
        max_tokens=1024,
    )
    return completion.choices[0].message.content
|
|
|
|
|
|
|
if __name__ == "__main__":

    # Smoke test: force the local Ollama backend (model "llama3.2") before
    # any helper reads LOCALLLM. NOTE(review): the ollama import at the top
    # of the file only ran if LOCALLLM was already set when the module
    # loaded — setting it here does not retroactively import OllamaChat.
    os.environ["LOCALLLM"] = "llama3.2"


    # Exercise the Wikipedia search path with a sample query.
    print(callWikipediaSearch("Mercedes Sosa discography"))
|
|