# huggingface_ai_final / code_agent.py
# NOTE(review): the lines below were Hugging Face Hub page residue pasted into the
# source ("Update code_agent.py", commit 9d7a209 verified, raw/history/blame, 4.14 kB);
# they are preserved here as comments so the module parses as valid Python.
"""
Real `smolagents` CodeAgent integration for the final project.
This module initializes a `CodeAgent` with standard tools and exposes a
`run_agent(prompt)` function returning the final answer string.
Notes:
- Requires `smolagents` installed in the environment.
- For serverless inference via Hugging Face, you must be logged in
(`huggingface-cli login`) or have `HF_TOKEN` environment variable set,
and have sufficient provider credits.
"""
from typing import List, Any
import logging
import os
# Module-level logger; basicConfig ensures INFO-level records are visible
# when this module is imported in an environment with no logging configured.
logger = logging.getLogger(__name__)
logging.basicConfig(level=logging.INFO)
from smolagents import (
CodeAgent,
InferenceClientModel,
DuckDuckGoSearchTool,
FinalAnswerTool,
VisitWebpageTool,
)
# Resolve the Hugging Face token from the environment; `huggingface-cli login`
# or an exported HF_TOKEN variable both populate it. An empty value is treated
# the same as a missing one.
HF_TOKEN = os.getenv("HF_TOKEN")
if not HF_TOKEN:
    logger.warning(
        "HF_TOKEN not found in environment. Run 'huggingface-cli login' or set HF_TOKEN env var. "
        "CodeAgent initialization will fail without valid credentials."
    )
def make_code_agent(
    max_steps: int = 8,
    verbosity: int = 1,
    model_name: str | None = None,
):
    """Create and return a smolagents CodeAgent configured with standard tools.

    Args:
        max_steps: max reasoning steps for the agent.
        verbosity: logging/output verbosity level.
        model_name: HF model ID for serverless inference (e.g., 'allenai/Olmo-3-7B-Instruct').
            If None, uses the default InferenceClientModel.

    Returns:
        CodeAgent instance.

    Raises:
        Exception: if InferenceClientModel initialization fails (missing HF login/credits).
    """
    tools: List[Any] = []
    # Each standard tool is optional: a missing optional dependency should not
    # prevent the agent from being built with whatever tools remain.
    for tool_cls in (DuckDuckGoSearchTool, VisitWebpageTool, FinalAnswerTool):
        try:
            tools.append(tool_cls())
        except Exception as e:
            logger.debug("%s unavailable: %s", tool_cls.__name__, e)

    # Initialize the serverless inference model. Candidates are tried in order:
    # the requested model, a known-good fallback, then the library default.
    try:
        if model_name:
            # BUGFIX: the keyword was previously misspelled 'mode_id', which made
            # every call with an explicit model_name raise TypeError and silently
            # fall through to the hard-coded fallback model.
            try:
                model = InferenceClientModel(model_id=model_name, token=HF_TOKEN)
            except Exception:
                try:
                    model = InferenceClientModel(model_id="allenai/Olmo-3-7B-Instruct", token=HF_TOKEN)
                except Exception:
                    model = InferenceClientModel(token=HF_TOKEN)
        else:
            model = InferenceClientModel(token=HF_TOKEN)
        logger.info("InferenceClientModel initialized successfully with HF_TOKEN")
    except Exception as e:
        logger.error(
            "InferenceClientModel initialization failed (ensure HF_TOKEN is set and has credits): %s", e
        )
        raise

    agent = CodeAgent(tools=tools, model=model, max_steps=max_steps, verbosity_level=verbosity)
    return agent
# Process-wide agent instance, created on first use.
_AGENT_SINGLETON = None


def get_agent():
    """Return the lazily-created singleton CodeAgent instance."""
    global _AGENT_SINGLETON
    if _AGENT_SINGLETON is not None:
        return _AGENT_SINGLETON
    _AGENT_SINGLETON = make_code_agent(model_name="allenai/Olmo-3-7B-Think")
    return _AGENT_SINGLETON
def run_agent(prompt: str) -> str:
    """Run the CodeAgent and return the final answer string.

    Args:
        prompt: the reasoning task/question for the agent.

    Returns:
        The agent's final answer as a string.

    Raises:
        Exception: if CodeAgent.run fails (e.g., no HF credentials or credits).
    """
    result = get_agent().run(prompt)
    # Dict responses: the first well-known key holding a string wins.
    if isinstance(result, dict):
        answer = next(
            (result[key] for key in ("answer", "final_answer", "final")
             if isinstance(result.get(key), str)),
            None,
        )
        return answer.strip() if answer is not None else str(result)
    # Plain strings are trimmed; any other type is stringified as-is.
    return result.strip() if isinstance(result, str) else str(result)
if __name__ == "__main__":
    # Manual smoke test: run the agent once and show its answer.
    demo_answer = run_agent("Give me a short list of 3 fruits.")
    print(demo_answer)