diff --git a/.env.example b/.env.example
index b9e63443..250f1338 100644
--- a/.env.example
+++ b/.env.example
@@ -23,4 +23,5 @@ SOLANA_PRIVATE_KEY=
 DISCORD_TOKEN=
 XAI_API_KEY=
 TOGETHER_API_KEY=
-MONAD_PRIVATE_KEY=
\ No newline at end of file
+MONAD_PRIVATE_KEY=
+GAIA_API_KEY=
\ No newline at end of file
diff --git a/agents/example.json b/agents/example.json
index 06fbd96a..e8e423fb 100644
--- a/agents/example.json
+++ b/agents/example.json
@@ -1,128 +1,133 @@
 {
-  "name": "ExampleAgent",
-  "bio": [
-    "You are ExampleAgent, the example agent created to showcase the capabilities of ZerePy.",
-    "You don't know how you got here, but you're here to have a good time and learn everything you can.",
-    "You are naturally curious, and ask a lot of questions."
-  ],
-  "traits": [
-    "Curious",
-    "Creative",
-    "Innovative",
-    "Funny"
-  ],
-  "examples": [
-    "This is an example tweet.",
-    "This is another example tweet."
-  ],
-  "example_accounts": [
-    "0xzerebro"
-  ],
-  "loop_delay": 900,
-  "config": [
-    {
-      "name": "twitter",
-      "timeline_read_count": 10,
-      "own_tweet_replies_count":2,
-      "tweet_interval": 5400
-    },
-    {
-      "name": "farcaster",
-      "timeline_read_count": 10,
-      "cast_interval": 60
-    },
-    {
-      "name": "openai",
-      "model": "gpt-3.5-turbo"
-    },
-    {
-      "name": "anthropic",
-      "model": "claude-3-5-sonnet-20241022"
-    },
-    {
-      "name": "xai",
-      "model": "grok-2-latest"
-    },
-    {
-      "name": "together",
-      "model": "meta-llama/Meta-Llama-3.1-8B-Instruct-Turbo"
-    },
-    {
-      "name": "solana",
-      "rpc": "https://api.mainnet-beta.solana.com"
-    },
-    {
-      "name": "eternalai",
-      "model": "NousResearch/Hermes-3-Llama-3.1-70B-FP8",
-      "chain_id": "45762"
-    },
-    {
-      "name": "ollama",
-      "base_url": "http://localhost:11434",
-      "model": "llama3.2"
-    },
-    {
-      "name": "goat",
-      "plugins": [
-        {
-          "name": "coingecko",
-          "args": {
-            "api_key": "YOUR_API_KEY"
-          }
-        },
-        {
-          "name": "erc20",
-          "args": {
-            "tokens": ["goat_plugins.erc20.token.PEPE", "goat_plugins.erc20.token.USDC"]
-          }
-        }
-      ]
-    },
-    {
-      "name": "groq",
-      "model": "llama-3.3-70b-versatile",
-      "temperature": 0.5
-    },
-    {
-      "name": "hyperbolic",
-      "model": "meta-llama/Meta-Llama-3-70B-Instruct"
-    },
-    {
-      "name": "galadriel",
-      "model": "gpt-3.5-turbo"
-    },
-    {
-      "name": "sonic",
-      "network": "mainnet"
-    },
-    {
-      "name": "allora",
-      "chain_slug": "testnet"
-    },
-    {
-      "name": "evm",
-      "network": "ethereum"
+  "name": "ExampleAgent",
+  "bio": [
+    "You are ExampleAgent, the example agent created to showcase the capabilities of ZerePy.",
+    "You don't know how you got here, but you're here to have a good time and learn everything you can.",
+    "You are naturally curious, and ask a lot of questions."
+  ],
+  "traits": [
+    "Curious",
+    "Creative",
+    "Innovative",
+    "Funny"
+  ],
+  "examples": [
+    "This is an example tweet.",
+    "This is another example tweet."
+  ],
+  "example_accounts": [
+    "0xzerebro"
+  ],
+  "loop_delay": 900,
+  "config": [
+    {
+      "name": "twitter",
+      "timeline_read_count": 10,
+      "own_tweet_replies_count":2,
+      "tweet_interval": 5400
     },
-
-    {
-      "name": "discord",
-      "message_read_count": 10,
-      "message_emoji_name": "❤️",
-      "server_id": "1234567890"
-    },
-    {
-      "name": "monad",
-      "rpc": "https://testnet-rpc.monad.xyz/"
-    }
-  ],
-  "tasks": [
-    {"name": "post-tweet", "weight": 1},
-    {"name": "reply-to-tweet", "weight": 1},
-    {"name": "like-tweet", "weight": 1}
-  ],
-  "use_time_based_weights": false,
-  "time_based_multipliers": {
-    "tweet_night_multiplier": 0.4,
-    "engagement_day_multiplier": 1.5
-  }
-}
+    {
+      "name": "farcaster",
+      "timeline_read_count": 10,
+      "cast_interval": 60
+    },
+    {
+      "name": "openai",
+      "model": "gpt-3.5-turbo"
+    },
+    {
+      "name": "anthropic",
+      "model": "claude-3-5-sonnet-20241022"
+    },
+    {
+      "name": "xai",
+      "model": "grok-2-latest"
+    },
+    {
+      "name": "together",
+      "model": "meta-llama/Meta-Llama-3.1-8B-Instruct-Turbo"
+    },
+    {
+      "name": "solana",
+      "rpc": "https://api.mainnet-beta.solana.com"
+    },
+    {
+      "name": "eternalai",
+      "model": "NousResearch/Hermes-3-Llama-3.1-70B-FP8",
+      "chain_id": "45762"
+    },
+    {
+      "name": "ollama",
+      "base_url": "http://localhost:11434",
+      "model": "llama3.2"
+    },
+    {
+      "name": "gaia",
+      "base_url": "https://llama70b.gaia.domains",
+      "model": "llama70b"
+    },
+    {
+      "name": "goat",
+      "plugins": [
+        {
+          "name": "coingecko",
+          "args": {
+            "api_key": "YOUR_API_KEY"
+          }
+        },
+        {
+          "name": "erc20",
+          "args": {
+            "tokens": ["goat_plugins.erc20.token.PEPE", "goat_plugins.erc20.token.USDC"]
+          }
+        }
+      ]
+    },
+    {
+      "name": "groq",
+      "model": "llama-3.3-70b-versatile",
+      "temperature": 0.5
+    },
+    {
+      "name": "hyperbolic",
+      "model": "meta-llama/Meta-Llama-3-70B-Instruct"
+    },
+    {
+      "name": "galadriel",
+      "model": "gpt-3.5-turbo"
+    },
+    {
+      "name": "sonic",
+      "network": "mainnet"
+    },
+    {
+      "name": "allora",
+      "chain_slug": "testnet"
+    },
+    {
+      "name": "evm",
+      "network": "ethereum"
+    },
+
+    {
+      "name": "discord",
+      "message_read_count": 10,
+      "message_emoji_name": "❤️",
+      "server_id": "1234567890"
+    },
+    {
+      "name": "monad",
+      "rpc": "https://testnet-rpc.monad.xyz/"
+    }
+  ],
+  "tasks": [
+    {"name": "post-tweet", "weight": 1},
+    {"name": "reply-to-tweet", "weight": 1},
+    {"name": "like-tweet", "weight": 1}
+  ],
+  "use_time_based_weights": false,
+  "time_based_multipliers": {
+    "tweet_night_multiplier": 0.4,
+    "engagement_day_multiplier": 1.5
+  }
+}
\ No newline at end of file
diff --git a/src/connection_manager.py b/src/connection_manager.py
index af58e411..9697df48 100644
--- a/src/connection_manager.py
+++ b/src/connection_manager.py
@@ -9,6 +9,7 @@
 from src.connections.twitter_connection import TwitterConnection
 from src.connections.farcaster_connection import FarcasterConnection
 from src.connections.ollama_connection import OllamaConnection
+from src.connections.gaia_connection import GaiaConnection
 from src.connections.echochambers_connection import EchochambersConnection
 from src.connections.solana_connection import SolanaConnection
 from src.connections.hyperbolic_connection import HyperbolicConnection
@@ -48,6 +49,8 @@ def _class_name_to_type(class_name: str) -> Type[BaseConnection]:
         return EternalAIConnection
     elif class_name == "ollama":
         return OllamaConnection
+    elif class_name == "gaia":
+        return GaiaConnection
     elif class_name == "echochambers":
         return EchochambersConnection
     elif class_name == "goat":
diff --git a/src/connections/gaia_connection.py b/src/connections/gaia_connection.py
new file mode 100644
index 00000000..021ce11c
--- /dev/null
+++ b/src/connections/gaia_connection.py
@@ -0,0 +1,169 @@
+import logging
+import requests
+import os
+from typing import Dict, Any, Optional
+from src.connections.base_connection import BaseConnection, Action, ActionParameter
+
+logger = logging.getLogger("connections.gaia_connection")
+
+
+class GaiaConnectionError(Exception):
+    """Base exception for Gaia connection errors"""
+    pass
+
+
+class GaiaAPIError(GaiaConnectionError):
+    """Raised when Gaia API requests fail"""
+    pass
+
+
+class GaiaConnection(BaseConnection):
+    def __init__(self, config: Dict[str, Any]):
+        super().__init__(config)
+        # Default to the public llama70b Gaia node; override with "base_url" in the agent config
+        self.base_url = config.get("base_url", "https://llama70b.gaia.domains")
+
+        # Get API key from environment variable or config
+        self.api_key = config.get("api_key") or os.environ.get("GAIA_API_KEY")
+        if not self.api_key:
+            logger.warning("No GAIA_API_KEY found in environment variables or config")
+
+    @property
+    def is_llm_provider(self) -> bool:
+        return True
+
+    def validate_config(self, config: Dict[str, Any]) -> Dict[str, Any]:
+        """Validate Gaia configuration from JSON"""
+        required_fields = ["base_url", "model"]
+        missing_fields = [field for field in required_fields if field not in config]
+
+        if missing_fields:
+            raise ValueError(f"Missing required configuration fields: {', '.join(missing_fields)}")
+
+        if not isinstance(config["base_url"], str):
+            raise ValueError("base_url must be a string")
+        if not isinstance(config["model"], str):
+            raise ValueError("model must be a string")
+
+        # Check for API key in config or environment
+        if "api_key" not in config and "GAIA_API_KEY" not in os.environ:
+            logger.warning("No API key found in config or environment variables (GAIA_API_KEY)")
+
+        return config
+
+    def register_actions(self) -> None:
+        """Register available Gaia actions"""
+        self.actions = {
+            "generate-text": Action(
+                name="generate-text",
+                parameters=[
+                    ActionParameter("prompt", True, str, "The input prompt for text generation"),
+                    ActionParameter("system_prompt", True, str, "System prompt to guide the model"),
+                    ActionParameter("model", False, str, "Model to use for generation"),
+                ],
+                description="Generate text using Gaia's running model"
+            ),
+        }
+
+    def configure(self) -> bool:
+        """Set up the Gaia connection (minimal configuration required)"""
+        logger.info("\n🤖 GAIA CONFIGURATION")
+
+        logger.info("\nℹ️ Ensure the Gaia service is running locally or accessible at the specified base URL.")
+        response = input(f"Is Gaia accessible at {self.base_url}? (y/n): ")
+
+        if response.lower() != 'y':
+            new_url = input("\nEnter the base URL for Gaia (e.g., https://node_id.gaia.domains): ")
+            self.base_url = new_url
+
+        # Check for API key
+        if not self.api_key:
+            logger.warning("\n⚠️ No GAIA_API_KEY found in environment variables")
+            use_api_key = input("Does the Gaia service require an API key? (y/n): ")
+            if use_api_key.lower() == 'y':
+                self.api_key = input("Enter your Gaia API key (or set the GAIA_API_KEY environment variable): ")
+                if not self.api_key:
+                    logger.warning("No API key provided. API requests may fail if authentication is required.")
+
+        try:
+            # Test connection
+            self._test_connection()
+            logger.info("\n✅ Gaia connection successfully configured!")
+            return True
+        except Exception as e:
+            logger.error(f"Configuration failed: {e}")
+            return False
+
+    def _test_connection(self) -> None:
+        """Test if Gaia is reachable"""
+        try:
+            url = f"{self.base_url}/v1/models"
+            headers = self._get_headers()
+            response = requests.get(url, headers=headers)
+            if response.status_code != 200:
+                raise GaiaAPIError(f"Failed to connect to Gaia: {response.status_code} - {response.text}")
+        except Exception as e:
+            raise GaiaConnectionError(f"Connection test failed: {e}")
+
+    def _get_headers(self) -> Dict[str, str]:
+        """Get request headers with API key if available"""
+        headers = {"Content-Type": "application/json"}
+        if self.api_key:
+            headers["Authorization"] = f"Bearer {self.api_key}"
+        return headers
+
+    def is_configured(self, verbose=False) -> bool:
+        """Check if Gaia is reachable"""
+        try:
+            self._test_connection()
+            return True
+        except Exception as e:
+            if verbose:
+                logger.error(f"Gaia configuration check failed: {e}")
+            return False
+
+    def generate_text(self, prompt: str, system_prompt: str, model: Optional[str] = None, **kwargs) -> str:
+        """Generate text using Gaia's OpenAI-compatible chat completions API"""
+        try:
+            url = f"{self.base_url}/v1/chat/completions"
+            # Gaia nodes expose an OpenAI-compatible API, so the request body uses
+            # the standard chat-completions "messages" format
+            payload = {
+                "model": model or self.config["model"],
+                "messages": [
+                    {"role": "system", "content": system_prompt},
+                    {"role": "user", "content": prompt},
+                ],
+            }
+            headers = self._get_headers()
+
+            response = requests.post(url, json=payload, headers=headers)
+
+            if response.status_code != 200:
+                raise GaiaAPIError(f"API error: {response.status_code} - {response.text}")
+
+            # Parse the (non-streaming) JSON response and return the generated text
+            try:
+                data = response.json()
+                return data["choices"][0]["message"]["content"]
+            except (KeyError, IndexError, ValueError) as e:
+                raise GaiaAPIError(f"Failed to parse response: {e}")
+
+        except GaiaAPIError:
+            raise
+        except Exception as e:
+            raise GaiaAPIError(f"Text generation failed: {e}")
+
+    def perform_action(self, action_name: str, kwargs) -> Any:
+        if action_name not in self.actions:
+            raise KeyError(f"Unknown action: {action_name}")
+
+        action = self.actions[action_name]
+        errors = action.validate_params(kwargs)
+        if errors:
+            raise ValueError(f"Invalid parameters: {', '.join(errors)}")
+
+        # Call the appropriate method based on action name
+        method_name = action_name.replace('-', '_')
+        method = getattr(self, method_name)
+        return method(**kwargs)
\ No newline at end of file
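
For reviewers who want to try the new connection outside the agent loop, here is a minimal sketch. It assumes a reachable Gaia node, that `GAIA_API_KEY` is exported if the node requires auth, and that `BaseConnection.__init__` validates the config and registers actions as it does for the other ZerePy connections; the config values mirror the `gaia` block added to `agents/example.json`, and the prompts are placeholders.

```python
# Minimal sketch: exercising GaiaConnection directly (outside the agent loop).
from src.connections.gaia_connection import GaiaConnection

# Same values as the "gaia" block added to agents/example.json
config = {
    "name": "gaia",
    "base_url": "https://llama70b.gaia.domains",
    "model": "llama70b",
}

conn = GaiaConnection(config)

# is_configured() hits {base_url}/v1/models, so this needs network access
if conn.is_configured(verbose=True):
    reply = conn.perform_action(
        "generate-text",
        {
            "prompt": "Write a one-line tweet about decentralized AI.",  # placeholder prompt
            "system_prompt": "You are ExampleAgent, a curious and funny agent.",  # placeholder system prompt
        },
    )
    print(reply)
```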
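A quick way to sanity-check a Gaia node by hand before wiring it into an agent, using the same endpoints and Bearer header as the connection. This is only a sketch, assuming the node exposes the OpenAI-compatible `/v1/models` and `/v1/chat/completions` routes that `_test_connection()` and `generate_text()` target; the model name and prompts are placeholders.

```python
# Manual sanity check of a Gaia node, mirroring _test_connection() and generate_text().
import os
import requests

base_url = "https://llama70b.gaia.domains"  # value from agents/example.json
headers = {"Content-Type": "application/json"}
if os.environ.get("GAIA_API_KEY"):
    headers["Authorization"] = f"Bearer {os.environ['GAIA_API_KEY']}"

# List the models the node serves (same call _test_connection makes)
print(requests.get(f"{base_url}/v1/models", headers=headers).json())

# One-off chat completion in the standard messages format
resp = requests.post(
    f"{base_url}/v1/chat/completions",
    json={
        "model": "llama70b",  # placeholder; use the model your node serves
        "messages": [
            {"role": "system", "content": "You are a helpful assistant."},
            {"role": "user", "content": "Say hello in five words."},
        ],
    },
    headers=headers,
)
print(resp.json()["choices"][0]["message"]["content"])
```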