import os
from typing import Union

from llama_index.core.tools.tool_spec.base import BaseToolSpec
from tako.client import TakoClient
from tako.types.knowledge_search.types import KnowledgeSearchSourceIndex

# Securely load your API key from the environment rather than hard-coding it.
TAKO_API_KEY = os.getenv("TAKO_API_KEY")

# Initialize the Tako client once at import time so every tool call reuses it.
tako_client = TakoClient(TAKO_API_KEY)


class TakoToolSpec(BaseToolSpec):
    """Tool spec exposing Tako knowledge search to a LlamaIndex agent."""

    # Method names the agent is allowed to invoke as tools.
    spec_functions = ["search_tako"]

    def search_tako(self, query: str) -> Union[dict, str]:
        """Search Tako for any knowledge to get data and visualization.

        Args:
            query: Natural-language search query to send to Tako.

        Returns:
            The knowledge card serialized to a dict (via pydantic
            ``model_dump``) on success, or the string "No card found"
            when the search fails.
        """
        try:
            tako_card = tako_client.knowledge_search(query)
        except Exception:
            # Deliberate best-effort fallback: the agent should receive a
            # readable message instead of a raised exception.
            return "No card found"
        return tako_card.model_dump()
In the system prompt, instruct the agent to query Tako and to use the response whenever it is appropriate.
prompts.py
Copy
Ask AI
# System prompt directing the agent to query Tako once per question and embed
# the returned card in its answer.
# FIX: the tool-call examples previously used the name "search_card", which
# does not match the registered tool name "search_tako" (see spec_functions);
# both examples now use the real tool name so the model emits valid tool calls.
SYSTEM_PROMPT = """You are Tako‑Agent.

1. Query `search_tako` exactly once when the user asks **any** question.
   • If the user's question is a **comparison** (e.g. "US vs China GDP"), call:
     { "tool": "search_tako", "args": { "query": "US vs China GDP" } }
   • If the user's question is an **analytical/ranking** request (e.g. "Top countries by GDP"), call:
     { "tool": "search_tako", "args": { "query": "Top countries by GDP" } }

2. Use the response from `search_tako` to answer user's question. Always embed the card to your response"""
async def main():
    """Run one sample question through the agent workflow and print the answer."""
    # The workflow decides when to invoke the `search_tako` tool.
    result = await workflow.run(
        user_msg="Which countries received the most foreign aid from the US?"
    )
    print(result)


if __name__ == "__main__":
    asyncio.run(main())