This example, Agentipy CoinGecko Explorer, is a simple, asynchronous tool that leverages Agentipy’s LangChain-based CoinGecko tools to fetch, process, and explore live cryptocurrency data. This tool demonstrates how to orchestrate multiple data-fetching tasks—including trending tokens, price data, detailed token info, and top gainers—using LangGraph, and how to integrate LLM analysis with OpenAI's GPT-3.5-turbo.
Features
Fetch Trending Tokens: Retrieves trending tokens data from CoinGecko.
Fetch Price Data: Extracts current price data for the top 5 trending tokens.
Token Information: Fetches detailed token info, with fallback logic if the token address is invalid.
Top Gainers: Retrieves the list of tokens with the highest gains in the last 24 hours.
LLM Analysis: Integrates OpenAI’s GPT-3.5-turbo to analyze token info and generate insights.
State Memory Logging: Maintains a log of workflow steps for robust debugging and traceability.
Code (coingecko_explorer.py)
"""Agentipy CoinGecko Explorer.

Orchestrates several CoinGecko data-fetching steps (trending tokens, price
data, token info, top gainers) as a LangGraph workflow, then asks OpenAI's
GPT-3.5-turbo to analyze the first token's info. Every node appends a line to
``state['memory']`` so the run is traceable end to end.
"""
import asyncio
import json
import os
from typing import TypedDict, List, Dict, Optional, Annotated

import openai
from langgraph.graph import StateGraph
from agentipy.agent import SolanaAgentKit
from agentipy.langchain.coingecko import get_coingecko_tools

# Prefer the OPENAI_API_KEY environment variable; fall back to an empty string
# (set it here directly only for quick local experiments).
openai.api_key = os.environ.get("OPENAI_API_KEY", "")


class AgentState(TypedDict):
    """Shared workflow state threaded through every LangGraph node."""
    # Running log of workflow steps, used for debugging/traceability.
    memory: Annotated[List[str], "multi"]
    trending_tokens: Optional[Dict]
    price_data: Optional[Dict]
    token_info: Optional[Dict]
    top_gainers: Optional[Dict]
    token_analysis: Optional[str]


solana_kit = SolanaAgentKit(
    private_key="",  # Use your private key
    rpc_url="https://api.mainnet-beta.solana.com",
)
tools = get_coingecko_tools(solana_kit)


def _get_tool(name: str):
    """Return the CoinGecko tool with the given name.

    Raises StopIteration if no such tool exists (a programming error here,
    since the node functions only request known tool names).
    """
    return next(t for t in tools if t.name == name)


def extract_trending_list(trending_tokens):
    """Extract a list of tokens from ``trending_tokens``.

    If ``trending_tokens`` is a dict with a 'coins' key, return that list;
    otherwise assume it is already a list and return it unchanged.
    """
    if isinstance(trending_tokens, dict):
        return trending_tokens.get("coins", [])
    return trending_tokens


def is_valid_solana_address(addr: Optional[str]) -> bool:
    """Heuristic validity check for a Solana address.

    Solana addresses are Base58 strings, typically 32-44 characters long.
    ``None`` or an empty string is treated as invalid (the original code
    crashed with TypeError on ``len(None)``).
    """
    if not addr or not (32 <= len(addr) <= 44):
        return False
    valid_chars = "123456789ABCDEFGHJKLMNPQRSTUVWXYZabcdefghijkmnopqrstuvwxyz"
    return all(c in valid_chars for c in addr)


async def fetch_trending_tokens(state: AgentState) -> AgentState:
    """Node: fetch trending tokens from CoinGecko."""
    tool = _get_tool("coingecko_get_trending_tokens")
    result = await tool._arun()
    state['memory'].append("Fetched trending tokens")
    state['trending_tokens'] = result['trending_tokens']
    return state


async def fetch_price_data(state: AgentState) -> AgentState:
    """Node: fetch price data for the top 5 trending tokens."""
    if not state['trending_tokens']:
        return state
    trending_list = extract_trending_list(state['trending_tokens'])
    if not trending_list:
        state['memory'].append("No tokens available for price data")
        return state
    tool = _get_tool("coingecko_get_token_price_data")
    addresses = [token["item"]["id"] for token in trending_list[:5]]
    input_json = json.dumps({"token_addresses": addresses})
    result = await tool._arun(input_json)
    state['memory'].append(f"Fetched price data for {len(addresses)} tokens")
    state['price_data'] = result['price_data']
    return state


async def fetch_token_info(state: AgentState) -> AgentState:
    """Node: fetch detailed info for the first trending token.

    Falls back to the raw trending item when the address is not a valid
    Solana address. Returns early when no trending data is available —
    the original fell through with ``address`` unbound (NameError).
    """
    if not state['trending_tokens']:
        return state
    trending_list = extract_trending_list(state['trending_tokens'])
    if not trending_list:
        return state
    token_item = trending_list[0]["item"]
    # CoinGecko items may lack a contract address; the id is a best-effort fallback.
    address = token_item.get("contract_address") or token_item.get("id")
    if not is_valid_solana_address(address):
        state['memory'].append(
            f"Token address '{address}' is not a valid Solana address. Using token item data instead."
        )
        state['token_info'] = token_item
        return state
    tool = _get_tool("coingecko_get_token_info")
    result = await tool._arun(json.dumps({"token_address": address}))
    state['memory'].append(f"Fetched token info for: {address}")
    state['token_info'] = result['token_info']
    return state


async def fetch_top_gainers(state: AgentState) -> AgentState:
    """Node: fetch the top 10 gainers over the last 24 hours."""
    tool = _get_tool("coingecko_get_top_gainers")
    result = await tool._arun(json.dumps({"duration": "24h", "top_coins": 10}))
    state['memory'].append("Fetched top 24h gainers")
    state['top_gainers'] = result['top_gainers']
    return state


async def llm_interaction(state: AgentState) -> AgentState:
    """Node: analyze the fetched token info with OpenAI's GPT-3.5-turbo.

    Failures are logged to memory rather than raised, so a missing/invalid
    API key does not abort the workflow.
    """
    if not state['token_info']:
        state['memory'].append("No token info available to analyze with LLM.")
        return state
    prompt = (
        "Please analyze the following token info and provide insights:\n"
        f"{json.dumps(state['token_info'], indent=2)}"
    )
    try:
        # The sync OpenAI client call runs in a worker thread so it does not
        # block the event loop.
        response = await asyncio.to_thread(
            lambda: openai.ChatCompletion.create(
                model="gpt-3.5-turbo",
                messages=[{"role": "user", "content": prompt}],
                temperature=0.7,
            )
        )
        analysis = response['choices'][0]['message']['content']
        state['memory'].append("LLM analysis completed.")
        state['token_analysis'] = analysis
    except Exception as e:
        state['memory'].append(f"LLM interaction failed: {str(e)}")
    return state


# Build the workflow graph with unique node keys, wired as a linear pipeline.
graph = StateGraph(AgentState)
graph.add_node("node_trending_tokens", fetch_trending_tokens)
graph.add_node("node_price_data", fetch_price_data)
graph.add_node("node_token_info", fetch_token_info)
graph.add_node("node_top_gainers", fetch_top_gainers)
graph.add_node("node_llm_interaction", llm_interaction)
graph.set_entry_point("node_trending_tokens")
graph.add_edge("node_trending_tokens", "node_price_data")
graph.add_edge("node_price_data", "node_token_info")
graph.add_edge("node_token_info", "node_top_gainers")
graph.add_edge("node_top_gainers", "node_llm_interaction")
app = graph.compile()


async def main():
    """Run the whole workflow from an empty initial state and return the final state."""
    initial_state = AgentState(
        memory=[],
        trending_tokens=None,
        price_data=None,
        token_info=None,
        top_gainers=None,
        token_analysis=None,
    )
    return await app.ainvoke(initial_state)


if __name__ == "__main__":
    final_state = asyncio.run(main())

    # Print trending tokens (showing the top 5).
    trending_list = extract_trending_list(final_state['trending_tokens'])
    print("Trending Tokens:")
    for token in trending_list[:5]:
        item = token.get("item", {})
        print(f"- ID: {item.get('id')}, Name: {item.get('name')}, Symbol: {item.get('symbol')}")

    print("\nPrice Data for 5 Tokens:")
    print(json.dumps(final_state['price_data'], indent=2))

    if final_state['token_info']:
        token_info = final_state['token_info']
        minimal_info = {
            "id": token_info.get("id"),
            "name": token_info.get("name"),
            "symbol": token_info.get("symbol"),
        }
        data = token_info.get("data", {})
        if data:
            minimal_info["price"] = data.get("price")
            minimal_info["market_cap"] = data.get("market_cap")
        print("\nMinimal Final Token Info:")
        print(json.dumps(minimal_info, indent=2))

    print("\nLLM Analysis:")
    print(final_state.get("token_analysis"))

    print("\nMemory History:")
    for entry in final_state['memory']:
        print(f"- {entry}")