Skip to content

Commit

Permalink
Use abstract providers class
Browse files Browse the repository at this point in the history
  • Loading branch information
nachollorca committed Jan 19, 2025
1 parent 4e0aee7 commit ab841ab
Show file tree
Hide file tree
Showing 3 changed files with 48 additions and 41 deletions.
27 changes: 13 additions & 14 deletions src/agilm/providers/anthropic.py
Original file line number Diff line number Diff line change
@@ -1,16 +1,15 @@
from anthropic import Anthropic
from ..types import Answer, Message, Model, Provider

from ..types import Answer, Message, Model


def get_answer(model: Model, conversation: list[Message], **kwargs) -> Answer:
    """Query an Anthropic model with *conversation* and wrap the reply.

    Extra keyword arguments are forwarded verbatim to ``messages.create``.
    """
    api = Anthropic()
    # NOTE(review): ``to_dict`` is accessed without calling — presumably a
    # property on Message; confirm it is not meant to be ``to_dict()``.
    payload = [msg.to_dict for msg in conversation]
    reply = api.messages.create(
        model=model.id,
        messages=payload,
        max_tokens=4096,
        **kwargs,
    )
    usage = reply.usage
    return Answer(
        content=reply.content[0].text,
        tokens_in=usage.input_tokens,
        tokens_out=usage.output_tokens,
    )
class AnthropicProvider(Provider):
    """Provider implementation backed by the Anthropic messages API."""

    def get_answer(self, model: Model, conversation: list[Message], **kwargs) -> Answer:
        """Send *conversation* to the given Anthropic model and return an Answer.

        Extra keyword arguments are forwarded verbatim to ``messages.create``.
        """
        api = Anthropic()
        # NOTE(review): ``dict`` is accessed without calling — presumably a
        # property on Message; confirm it is not meant to be ``dict()``.
        payload = [msg.dict for msg in conversation]
        reply = api.messages.create(
            model=model.id,
            messages=payload,
            max_tokens=4096,
            **kwargs,
        )
        usage = reply.usage
        return Answer(
            content=reply.content[0].text,
            tokens_in=usage.input_tokens,
            tokens_out=usage.output_tokens,
        )
31 changes: 15 additions & 16 deletions src/agilm/providers/together.py
Original file line number Diff line number Diff line change
@@ -1,18 +1,17 @@
from together import Together
from ..types import Answer, Message, Model, Provider

from ..types import Answer, Message, Model


def get_answer(model: Model, conversation: list[Message], **kwargs) -> Answer:
    """Query a Together-hosted model with *conversation* and wrap the reply.

    Extra keyword arguments are forwarded verbatim to
    ``chat.completions.create``.
    """
    api = Together()
    # NOTE(review): ``to_dict`` is accessed without calling — presumably a
    # property on Message; confirm it is not meant to be ``to_dict()``.
    payload = [msg.to_dict for msg in conversation]
    reply = api.chat.completions.create(model=model.id, messages=payload, **kwargs)
    usage = reply.usage
    return Answer(
        content=reply.choices[0].message.content,
        tokens_in=usage.prompt_tokens,
        tokens_out=usage.completion_tokens,
    )
class TogetherProvider(Provider):
    """Provider implementation backed by the Together chat-completions API."""

    def get_answer(self, model: Model, conversation: list[Message], **kwargs) -> Answer:
        """Send *conversation* to the given Together model and return an Answer.

        Extra keyword arguments are forwarded verbatim to
        ``chat.completions.create``.
        """
        api = Together()
        # NOTE(review): ``dict`` is accessed without calling — presumably a
        # property on Message; confirm it is not meant to be ``dict()``.
        payload = [msg.dict for msg in conversation]
        reply = api.chat.completions.create(model=model.id, messages=payload, **kwargs)
        usage = reply.usage
        return Answer(
            content=reply.choices[0].message.content,
            tokens_in=usage.prompt_tokens,
            tokens_out=usage.completion_tokens,
        )
31 changes: 20 additions & 11 deletions src/agilm/providers/vertex.py
Original file line number Diff line number Diff line change
Expand Up @@ -3,16 +3,25 @@
import vertexai
from vertexai.generative_models import Content, GenerativeModel, Part

from ..types import Answer, Message, Model
from ..types import Answer, Message, Model, Provider


def get_answer(model: Model, conversation: list[Message], **kwargs) -> Answer:
contents = [
Content(role=message.role, parts=[Part.from_text(message.content)])
for message in conversation
class VertexProvider(Provider):
    """Provider implementation backed by Google Vertex AI generative models."""

    # Regions this provider is allowed to run in; together with
    # ``model.locations`` this gates the random region pick below.
    locations = [
        "us-central1",
        "eu-central1",
    ]

    def get_answer(self, model: Model, conversation: list[Message], **kwargs) -> Answer:
        """Send *conversation* to the given Vertex model and return an Answer.

        Extra keyword arguments are forwarded to ``generate_content`` (the
        original accepted ``**kwargs`` but silently dropped them — fixed here
        for consistency with the other providers).
        """
        contents = [
            Content(role=message.role, parts=[Part.from_text(message.content)])
            for message in conversation
        ]
        # Spread load across regions: only re-init when both the provider and
        # the model declare candidate locations.
        if self.locations and model.locations:
            vertexai.init(location=random.choice(model.locations))
        client = GenerativeModel(model.id)
        response = client.generate_content(contents=contents, **kwargs)
        # Vertex's GenerationResponse reports token counts on
        # ``usage_metadata`` (prompt_token_count / candidates_token_count);
        # it has no ``usage.input_tokens`` attribute as the original assumed.
        return Answer(
            content=response.text,
            tokens_in=response.usage_metadata.prompt_token_count,
            tokens_out=response.usage_metadata.candidates_token_count,
        )

0 comments on commit ab841ab

Please sign in to comment.