import os
import json

import requests
from fastapi import FastAPI, Request
from fastapi.responses import HTMLResponse
from fastapi.middleware.cors import CORSMiddleware
from openai import OpenAI

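# --- Configuration ---
# Credentials for Google Custom Search and the OpenAI-compatible LLM endpoint
# are read from environment variables; LLM_BASE_URL falls back to a default host.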
GOOGLE_API_KEY = os.getenv("GOOGLE_API_KEY")
GOOGLE_CX = os.getenv("GOOGLE_CX")
LLM_API_KEY = os.getenv("LLM_API_KEY")
LLM_BASE_URL = os.getenv("LLM_BASE_URL", "https://api-15i2e8ze256bvfn6.aistudio-app.com/v1")

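# --- Google Custom Search tool ---
# Small result containers mirror the attribute access used later in /chat
# (res.source_title, res.url, res.snippet).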
class SearchResult:
    def __init__(self, title, url, snippet):
        self.source_title = title
        self.url = url
        self.snippet = snippet

    def dict(self):
        return self.__dict__


class SearchResultsContainer:
    def __init__(self, query, results):
        self.query = query
        self.results = results


def Google_Search_tool(queries: list) -> list:
    """
    Performs a real web search using the Google Custom Search JSON API.
    Only the first query is executed. Returns a list containing one
    SearchResultsContainer, or an empty list if the request fails.
    """
    if not GOOGLE_API_KEY or not GOOGLE_CX:
        print("ERROR: GOOGLE_API_KEY or GOOGLE_CX environment variables not set.")
        # Return the same container shape as the success path so callers can
        # still access .results and .snippet without special-casing.
        return [SearchResultsContainer(
            query=queries[0],
            results=[SearchResult(title="Search not configured", url="", snippet="Search is not configured.")]
        )]

    query = queries[0]
    print(f"Executing Google Custom Search for: '{query}'")

    search_url = "https://www.googleapis.com/customsearch/v1"
    params = {
        "key": GOOGLE_API_KEY,
        "cx": GOOGLE_CX,
        "q": query,
        "num": 3
    }

    try:
        response = requests.get(search_url, params=params, timeout=10)
        response.raise_for_status()
        search_results = response.json()

        parsed_snippets = []
        if "items" in search_results:
            for item in search_results["items"]:
                parsed_snippets.append(
                    SearchResult(
                        title=item.get("title"),
                        url=item.get("link"),
                        snippet=item.get("snippet")
                    )
                )

        return [SearchResultsContainer(query=query, results=parsed_snippets)]

    except requests.exceptions.RequestException as e:
        print(f"Error during Google search request: {e}")
        return []

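# --- FastAPI app ---
# Permissive CORS so a browser front end on any origin can call the API.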
app = FastAPI()

app.add_middleware(
    CORSMiddleware,
    allow_origins=["*"],
    allow_credentials=True,
    allow_methods=["*"],
    allow_headers=["*"],
)

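# --- LLM client ---
# OpenAI-compatible client pointed at LLM_BASE_URL; /chat returns an error
# message if the API key is missing.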
if not LLM_API_KEY or not LLM_BASE_URL:
    print("WARNING: LLM_API_KEY or LLM_BASE_URL is not set. The /chat endpoint will fail.")
    client = None
else:
    client = OpenAI(
        api_key=LLM_API_KEY,
        base_url=LLM_BASE_URL
    )

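# --- Tool schema advertised to the model ---
# Function names must match the OpenAI pattern (letters, digits, underscores,
# dashes), so the tool is exposed as "google_search" rather than a name containing a space.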
available_tools = [
    {
        "type": "function",
        "function": {
            "name": "google_search",
            "description": "Performs a Google search to find information on the internet. Use this when the user asks a question that requires up-to-date, external, or real-time knowledge (e.g., current events, weather, specific facts not in training data, definitions, 'what is', 'latest', 'news about').",
            "parameters": {
                "type": "object",
                "properties": {
                    "query": {
                        "type": "string",
                        "description": "The search query based on the user's question, optimized for web search. Be concise and precise."
                    }
                },
                "required": ["query"]
            }
        }
    }
]

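# --- Chat endpoint ---
# Two-pass flow: the first completion may request a tool call; if it does, the
# search runs, its output is appended as a "tool" message, and a second
# completion produces the final answer.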
@app.post("/chat") |
|
|
async def chat_endpoint(request: Request): |
|
|
if not client: |
|
|
return {"response": "Error: The LLM client is not configured on the server. API keys may be missing."} |
|
|
|
|
|
try: |
|
|
data = await request.json() |
|
|
user_message = data.get("message") |
|
|
chat_history = data.get("history", []) |
|
|
|
|
|
if not user_message: |
|
|
return {"response": "Error: No message provided."} |
|
|
|
|
|
messages = chat_history + [{"role": "user", "content": user_message}] |
|
|
|
|
|
|
|
|
llm_response_1 = client.chat.completions.create( |
|
|
model="unsloth/Qwen3-30B-A3B-GGUF", |
|
|
temperature=0.6, |
|
|
messages=messages, |
|
|
stream=False, |
|
|
tools=available_tools, |
|
|
tool_choice="auto" |
|
|
) |
|
|
|
|
|
tool_calls = llm_response_1.choices[0].message.tool_calls |
|
|
if tool_calls: |
|
|
|
|
|
tool_outputs = [] |
|
|
for tool_call in tool_calls: |
|
|
function_name = tool_call.function.name |
|
|
function_args = json.loads(tool_call.function.arguments) |
|
|
|
|
|
if function_name == "Google Search": |
|
|
search_query = function_args.get("query") |
|
|
if search_query: |
|
|
search_results_obj = Google_Search_tool(queries=[search_query]) |
|
|
|
|
|
formatted_results = [] |
|
|
if search_results_obj and search_results_obj[0].results: |
|
|
for res in search_results_obj[0].results: |
|
|
formatted_results.append(f"Source: {res.source_title}\nURL: {res.url}\nSnippet: {res.snippet}") |
|
|
|
|
|
tool_output_content = "No relevant search results found." |
|
|
if formatted_results: |
|
|
tool_output_content = "Search Results:\n" + "\n---\n".join(formatted_results[:3]) |
|
|
|
|
|
tool_outputs.append({ |
|
|
"tool_call_id": tool_call.id, |
|
|
"output": tool_output_content |
|
|
}) |
|
|
else: |
|
|
tool_outputs.append({ |
|
|
"tool_call_id": tool_call.id, |
|
|
"output": f"Error: Tool '{function_name}' is not supported." |
|
|
}) |
|
|
|
|
|
|
|
|
messages.append(llm_response_1.choices[0].message) |
|
|
for output_item in tool_outputs: |
|
|
messages.append( |
|
|
{"role": "tool", "tool_call_id": output_item["tool_call_id"], "content": output_item["output"]} |
|
|
) |
|
|
|
|
|
llm_response_2 = client.chat.completions.create( |
|
|
model="unsloth/Qwen3-30B-A3B-GGUF", |
|
|
temperature=0.6, |
|
|
messages=messages, |
|
|
stream=False |
|
|
) |
|
|
final_chatbot_response = llm_response_2.choices[0].message.content |
|
|
else: |
|
|
final_chatbot_response = llm_response_1.choices[0].message.content |
|
|
|
|
|
return {"response": final_chatbot_response} |
|
|
|
|
|
except Exception as e: |
|
|
print(f"ERROR in /chat: {e}") |
|
|
return {"response": f"An internal error occurred: {str(e)}"} |
|
|
|
|
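# Example request (hypothetical host/port, assuming the server runs locally):
#   curl -X POST http://localhost:8000/chat \
#        -H "Content-Type: application/json" \
#        -d '{"message": "What is the latest news about FastAPI?", "history": []}'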

@app.get("/")
async def root():
    return {"message": "Chatbot FastAPI is running. Send POST requests to /chat."}