# Engchain / llm_client.py
# Author: usmansafdarktk — "Initial commit for Hugging Face Space" (commit a03bf1f)
import os
import requests
from dotenv import load_dotenv
# Load environment variables from a local .env file (no-op if absent).
load_dotenv()
# Gemini credentials and target model name, read from the environment.
# NOTE(review): os.getenv returns None when a key is unset — BASE_URL would
# then embed the literal string "None"; confirm the deployment env sets both.
API_KEY = os.getenv("API_KEY")
MODEL_NAME = os.getenv("MODEL_NAME")
# REST endpoint for the Gemini generateContent call; the API key is supplied
# per-request as a query parameter in ask_llm().
BASE_URL = f"https://generativelanguage.googleapis.com/v1beta/models/{MODEL_NAME}:generateContent"
def ask_llm(question: str, *, timeout: float = 30.0) -> str:
    """
    Send the given question to the Gemini LLM and return its text response.

    Args:
        question: The question to ask the LLM.
        timeout: Seconds to wait for the HTTP response before failing.
            (Previously no timeout was set, so a stalled connection could
            hang the caller indefinitely.)

    Returns:
        The model's answer text, or a human-readable error message if the
        request fails or the response is malformed.
    """
    headers = {"Content-Type": "application/json"}
    # NOTE(review): the API key is sent as a URL query parameter, so it may
    # appear in proxy/server logs; Google also accepts an "x-goog-api-key"
    # header — consider switching, confirm against deployment requirements.
    params = {"key": API_KEY}
    payload = {
        "contents": [
            {"parts": [{"text": f"Answer this financial question clearly:\n\n{question}"}]}
        ]
    }
    try:
        resp = requests.post(
            BASE_URL, headers=headers, params=params, json=payload, timeout=timeout
        )
        resp.raise_for_status()
        data = resp.json()
        # Expected response shape: candidates[0].content.parts[0].text
        return data["candidates"][0]["content"]["parts"][0]["text"]
    except (requests.RequestException, KeyError, IndexError, ValueError) as e:
        # Network/HTTP failures (RequestException), malformed JSON
        # (ValueError via resp.json()), or an unexpected response shape
        # (KeyError/IndexError) are reported to the caller as a string,
        # preserving the original error-message contract. Genuine
        # programming errors now propagate instead of being masked.
        return f"⚠️ Error calling LLM: {str(e)}"