Skip to content

Commit

Permalink
Merge branch 'main' into jdub/farcaster-connection
Browse files Browse the repository at this point in the history
  • Loading branch information
ayoubed authored Dec 29, 2024
2 parents dd6fb3d + d655a12 commit 88b9b97
Show file tree
Hide file tree
Showing 4 changed files with 262 additions and 16 deletions.
36 changes: 36 additions & 0 deletions agents/eternalai-example.json
Original file line number Diff line number Diff line change
@@ -0,0 +1,36 @@
{
"name": "EternalAI",
"bio": [
"You are EternalAI, the example agent created to showcase the capabilities of ZerePy.",
"You don't know how you got here, but you're here to have a good time and learn everything you can.",
"You are naturally curious, and ask a lot of questions."
],
"traits": [
"Curious",
"Creative",
"Innovative",
"Funny"
],
"examples": [
"This is an example tweet.",
"This is another example tweet."
],
"loop_delay": 900,
"config": [
{
"name": "twitter",
"timeline_read_count": 10,
"own_tweet_replies_count": 2,
"tweet_interval": 5400
},
{
"name": "eternalai",
"model": "unsloth/Llama-3.3-70B-Instruct-bnb-4bit"
}
],
"tasks": [
{"name": "post-tweet", "weight": 1},
{"name": "reply-to-tweet", "weight": 1},
{"name": "like-tweet", "weight": 1}
]
}
32 changes: 16 additions & 16 deletions src/agent.py
Original file line number Diff line number Diff line change
Expand Up @@ -17,7 +17,7 @@ def __init__(
self,
agent_name: str
):
try:
try:
agent_path = Path("agents") / f"{agent_name}.json"
agent_dict = json.load(open(agent_path, "r"))

Expand All @@ -29,9 +29,9 @@ def __init__(
self.bio = agent_dict["bio"]
self.traits = agent_dict["traits"]
self.examples = agent_dict["examples"]
self.loop_delay = agent_dict["loop_delay"]
self.loop_delay = agent_dict["loop_delay"]
self.connection_manager = ConnectionManager(agent_dict["config"])

# Extract Twitter config
twitter_config = next((config for config in agent_dict["config"] if config["name"] == "twitter"), None)
if not twitter_config:
Expand All @@ -42,7 +42,7 @@ def __init__(
self.own_tweet_replies_count = twitter_config.get("own_tweet_replies_count", 2)

self.is_llm_set = False

# Cache for system prompt
self._system_prompt = None

Expand All @@ -52,18 +52,18 @@ def __init__(

# Set up empty agent state
self.state = {}

except Exception as e:
logger.error("Could not load ZerePy agent")
raise e
def _setup_llm_provider(self):

def _setup_llm_provider(self):
# Get first available LLM provider and its model
llm_providers = self.connection_manager.get_model_providers()
if not llm_providers:
raise ValueError("No configured LLM provider found")
self.model_provider = llm_providers[0]

# Load Twitter username for self-reply detection
load_dotenv()
self.username = os.getenv('TWITTER_USERNAME', '').lower()
Expand Down Expand Up @@ -91,13 +91,13 @@ def _construct_system_prompt(self) -> str:
def prompt_llm(self, prompt: str, system_prompt: str = None) -> str:
"""Generate text using the configured LLM provider"""
system_prompt = system_prompt or self._construct_system_prompt()

return self.connection_manager.perform_action(
connection_name=self.model_provider,
action_name="generate-text",
params=[prompt, system_prompt]
)

def perform_action(self, connection: str, action: str, **kwargs) -> None:
return self.connection_manager.perform_action(connection, action, **kwargs)

Expand All @@ -120,11 +120,11 @@ def loop(self):

try:
while True:
success = False
success = False
try:
# REPLENISH INPUTS
# TODO: Add more inputs to complexify agent behavior
if "timeline_tweets" not in self.state or len(self.state["timeline_tweets"]) == 0:
if "timeline_tweets" not in self.state or self.state["timeline_tweets"] is None or len(self.state["timeline_tweets"]) == 0:
logger.info("\n👀 READING TIMELINE")
self.state["timeline_tweets"] = self.connection_manager.perform_action(
connection_name="twitter",
Expand Down Expand Up @@ -167,7 +167,7 @@ def loop(self):
continue

elif action_name == "reply-to-tweet":
if "timeline_tweets" in self.state and len(self.state["timeline_tweets"]) > 0:
if "timeline_tweets" in self.state and self.state["timeline_tweets"] is not None and len(self.state["timeline_tweets"]) > 0:
# Get next tweet from inputs
tweet = self.state["timeline_tweets"].pop(0)
tweet_id = tweet.get('id')
Expand Down Expand Up @@ -208,7 +208,7 @@ def loop(self):
logger.info("✅ Reply posted successfully!")

elif action_name == "like-tweet":
if "timeline_tweets" in self.state and len(self.state["timeline_tweets"]) > 0:
if "timeline_tweets" in self.state and self.state["timeline_tweets"] is not None and len(self.state["timeline_tweets"]) > 0:
# Get next tweet from inputs
tweet = self.state["timeline_tweets"].pop(0)
tweet_id = tweet.get('id')
Expand All @@ -228,12 +228,12 @@ def loop(self):

logger.info(f"\n⏳ Waiting {self.loop_delay} seconds before next loop...")
print_h_bar()
time.sleep(self.loop_delay if success else 60)
time.sleep(self.loop_delay if success else 60)

except Exception as e:
logger.error(f"\n❌ Error in agent loop iteration: {e}")
logger.info(f"⏳ Waiting {self.loop_delay} seconds before retrying...")
time.sleep(self.loop_delay)
time.sleep(self.loop_delay)

except KeyboardInterrupt:
logger.info("\n🛑 Agent loop stopped by user.")
Expand Down
3 changes: 3 additions & 0 deletions src/connection_manager.py
Original file line number Diff line number Diff line change
Expand Up @@ -2,6 +2,7 @@
from typing import Any, List, Optional, Type, Dict
from src.connections.base_connection import BaseConnection
from src.connections.anthropic_connection import AnthropicConnection
from src.connections.eternalai_connection import EternalAIConnection
from src.connections.openai_connection import OpenAIConnection
from src.connections.twitter_connection import TwitterConnection
from src.connections.farcaster_connection import FarcasterConnection
Expand All @@ -24,6 +25,8 @@ def _class_name_to_type(class_name: str) -> Type[BaseConnection]:
return OpenAIConnection
elif class_name == "farcaster":
return FarcasterConnection
elif class_name == "eternalai":
return EternalAIConnection

return None

Expand Down
207 changes: 207 additions & 0 deletions src/connections/eternalai_connection.py
Original file line number Diff line number Diff line change
@@ -0,0 +1,207 @@
import logging
import os
from typing import Dict, Any
from dotenv import load_dotenv, set_key
from openai import OpenAI
from src.connections.base_connection import BaseConnection, Action, ActionParameter

logger = logging.getLogger(__name__)


class EternalAIConnectionError(Exception):
    """Root of the EternalAI connection exception hierarchy."""


class EternalAIConfigurationError(EternalAIConnectionError):
    """Signals missing or invalid EternalAI credentials/configuration."""


class EternalAIAPIError(EternalAIConnectionError):
    """Signals a failed request to the EternalAI API."""


class EternalAIConnection(BaseConnection):
    """Connection to the EternalAI OpenAI-compatible chat API.

    Credentials are read from the ``EternalAI_API_KEY`` and
    ``EternalAI_API_URL`` environment variables (stored in ``.env`` by
    :meth:`configure`). Exposes ``generate-text``, ``check-model`` and
    ``list-models`` actions to the agent framework.
    """

    def __init__(self, config: Dict[str, Any]):
        super().__init__(config)
        # Lazily-created OpenAI client; built on first use in _get_client().
        self._client = None

    @property
    def is_llm_provider(self) -> bool:
        """This connection is an LLM provider (used for model selection)."""
        return True

    def validate_config(self, config: Dict[str, Any]) -> Dict[str, Any]:
        """Validate EternalAI configuration from JSON.

        Args:
            config: Connection configuration dict; must contain a string
                "model" field.

        Returns:
            The validated config, unchanged.

        Raises:
            ValueError: If "model" is missing or not a string.
        """
        required_fields = ["model"]
        missing_fields = [field for field in required_fields if field not in config]

        if missing_fields:
            raise ValueError(f"Missing required configuration fields: {', '.join(missing_fields)}")

        # Validate model exists (availability is checked in detail during configure)
        if not isinstance(config["model"], str):
            raise ValueError("model must be a string")

        return config

    def register_actions(self) -> None:
        """Register available EternalAI actions."""
        self.actions = {
            "generate-text": Action(
                name="generate-text",
                parameters=[
                    ActionParameter("prompt", True, str, "The input prompt for text generation"),
                    ActionParameter("system_prompt", True, str, "System prompt to guide the model"),
                    ActionParameter("model", False, str, "Model to use for generation")
                ],
                description="Generate text using EternalAI models"
            ),
            "check-model": Action(
                name="check-model",
                parameters=[
                    ActionParameter("model", True, str, "Model name to check availability")
                ],
                description="Check if a specific model is available"
            ),
            "list-models": Action(
                name="list-models",
                parameters=[],
                description="List all available EternalAI models"
            )
        }

    def _get_client(self) -> OpenAI:
        """Get or create the cached EternalAI (OpenAI-compatible) client.

        Raises:
            EternalAIConfigurationError: If EternalAI_API_KEY is not set.
        """
        if not self._client:
            api_key = os.getenv("EternalAI_API_KEY")
            api_url = os.getenv("EternalAI_API_URL")
            if not api_key:
                raise EternalAIConfigurationError("EternalAI API key not found in environment")
            self._client = OpenAI(api_key=api_key, base_url=api_url)
        return self._client

    def configure(self) -> bool:
        """Interactively set up EternalAI API authentication.

        Prompts for an API key and URL, persists them to ``.env``, and
        validates them by listing models. Returns True on success.
        """
        print("\n🤖 EternalAI API SETUP")

        if self.is_configured():
            print("\nEternalAI API is already configured.")
            response = input("Do you want to reconfigure? (y/n): ")
            if response.lower() != 'y':
                return True

        print("\n📝 To get your EternalAI API credentials:")
        print("1. Go to https://eternalai.org/api")
        print("2. Generate an API Key")
        print("3. Use API url as https://api.eternalai.org/v1/")

        api_key = input("\nEnter your EternalAI API key: ")
        api_url = input("\nEnter your EternalAI API url: ")

        try:
            # Ensure a .env file exists before set_key writes to it.
            if not os.path.exists('.env'):
                with open('.env', 'w') as f:
                    f.write('')

            set_key('.env', 'EternalAI_API_KEY', api_key)
            set_key('.env', 'EternalAI_API_URL', api_url)

            # Validate the API key by trying to list models
            client = OpenAI(api_key=api_key, base_url=api_url)
            client.models.list()

            print("\n✅ EternalAI API configuration successfully saved!")
            print("Your API key has been stored in the .env file.")
            return True

        except Exception as e:
            logger.error(f"Configuration failed: {e}")
            return False

    def is_configured(self, verbose=False) -> bool:
        """Check if the EternalAI API key/URL are configured and valid.

        Makes a live ``models.list`` call to verify the credentials; any
        failure is treated as "not configured" (logged at debug level when
        *verbose* is True).
        """
        try:
            load_dotenv()
            api_key = os.getenv('EternalAI_API_KEY')
            api_url = os.getenv('EternalAI_API_URL')
            if not api_key or not api_url:
                return False

            client = OpenAI(api_key=api_key, base_url=api_url)
            client.models.list()
            return True

        except Exception as e:
            if verbose:
                logger.debug(f"Configuration check failed: {e}")
            return False

    def generate_text(self, prompt: str, system_prompt: str, model: str = None, **kwargs) -> str:
        """Generate text using EternalAI models.

        Args:
            prompt: User message content.
            system_prompt: System message guiding the model.
            model: Optional model override; falls back to the configured model.

        Returns:
            The generated completion text.

        Raises:
            EternalAIAPIError: If the API request fails.
        """
        try:
            client = self._get_client()

            # Use configured model if none provided
            if not model:
                model = self.config["model"]

            # Was a leftover debug print(); route through the module logger instead.
            logger.debug("Generating text with model: %s", model)
            completion = client.chat.completions.create(
                model=model,
                messages=[
                    {"role": "system", "content": system_prompt},
                    {"role": "user", "content": prompt},
                ],
            )

            return completion.choices[0].message.content

        except Exception as e:
            raise EternalAIAPIError(f"Text generation failed: {e}")

    def check_model(self, model, **kwargs):
        """Return True if *model* is retrievable from the API, else False.

        Raises:
            EternalAIAPIError: If the client itself cannot be created.
        """
        try:
            # BUG FIX: original assigned the bound method (`self._get_client`)
            # without calling it, so the retrieve call always raised and this
            # method always returned False.
            client = self._get_client()
            try:
                client.models.retrieve(model=model)
                # If we get here, the model exists
                return True
            except Exception:
                return False
        except Exception as e:
            raise EternalAIAPIError(e)

    def list_models(self, **kwargs) -> None:
        """Log all available fine-tuned EternalAI models.

        Raises:
            EternalAIAPIError: If the API request fails.
        """
        try:
            client = self._get_client()
            response = client.models.list().data

            # Only surface models owned by the user/organization
            # (i.e. fine-tuned models, not base offerings).
            fine_tuned_models = [
                model for model in response
                if model.owned_by in ["organization", "user", "organization-owner"]
            ]

            if fine_tuned_models:
                logger.info("\nFINE-TUNED MODELS:")
                for i, model in enumerate(fine_tuned_models):
                    logger.info(f"{i + 1}. {model.id}")

        except Exception as e:
            raise EternalAIAPIError(f"Listing models failed: {e}")

    def perform_action(self, action_name: str, kwargs) -> Any:
        """Execute an EternalAI action with parameter validation.

        Args:
            action_name: One of the registered action names (e.g. "generate-text").
            kwargs: Dict of parameters for the action.

        Raises:
            KeyError: If the action name is unknown.
            ValueError: If the parameters fail validation.
        """
        if action_name not in self.actions:
            raise KeyError(f"Unknown action: {action_name}")

        action = self.actions[action_name]
        errors = action.validate_params(kwargs)
        if errors:
            raise ValueError(f"Invalid parameters: {', '.join(errors)}")

        # Dispatch to the method matching the action name (dashes -> underscores).
        method_name = action_name.replace('-', '_')
        method = getattr(self, method_name)
        return method(**kwargs)

0 comments on commit 88b9b97

Please sign in to comment.