"""Smoke-test a deepagents agent backed by a local Ollama model.

Sends a single "hello world" prompt through the agent and prints every
message in the resulting conversation.

Configuration (environment variables):
    OLLAMA_BASE_URL  -- Ollama server URL (default: "http://localhost:11434")
    DEEPAGENTS_MODEL -- model name to use (default: "gemma3:4b")
"""

import os

from langchain_ollama import ChatOllama

from deepagents import create_deep_agent

OLLAMA_BASE_URL = os.getenv("OLLAMA_BASE_URL", "http://localhost:11434")
MODEL = os.getenv("DEEPAGENTS_MODEL", "gemma3:4b")


def main() -> None:
    """Build the agent, run one prompt, and print each message's content."""
    print(f"Connecting to Ollama at {OLLAMA_BASE_URL} with model {MODEL}")
    model = ChatOllama(model=MODEL, base_url=OLLAMA_BASE_URL)
    agent = create_deep_agent(model=model)

    result = agent.invoke({
        "messages": [{"role": "user", "content": "Say hello world in one sentence."}]
    })

    print("\n--- Agent Response ---")
    # result["messages"] holds the full conversation history; print only
    # messages that actually carry non-empty text content.
    for msg in result.get("messages", []):
        if hasattr(msg, "content") and msg.content:
            print(f"[{msg.__class__.__name__}]: {msg.content}")


# Guard so importing this module does not trigger a network call to Ollama.
if __name__ == "__main__":
    main()