diff --git a/README.md b/README.md index add8b851..9d6fb98b 100644 --- a/README.md +++ b/README.md @@ -8,7 +8,7 @@ Simple, unified interface to multiple Generative AI providers. `aisuite` makes it easy for developers to use multiple LLM through a standardized interface. Using an interface similar to OpenAI's, `aisuite` makes it easy to interact with the most popular LLMs and compare the results. It is a thin wrapper around python client libraries, and allows creators to seamlessly swap out and test responses from different LLM providers without changing their code. Today, the library is primarily focussed on chat completions. We will expand it cover more use cases in near future. Currently supported providers are - -OpenAI, Anthropic, Azure, Google, AWS, Groq, Mistral, HuggingFace Ollama, Sambanova and Watsonx. +OpenAI, Anthropic, Azure, Google, AWS, Groq, Mistral, HuggingFace, Ollama, Sambanova, Watsonx, and Google Gemini. To maximize stability, `aisuite` uses either the HTTP endpoint or the SDK for making calls to the provider. ## Installation @@ -119,3 +119,59 @@ We follow a convention-based approach for loading providers, which relies on str in providers/openai_provider.py This convention simplifies the addition of new providers and ensures consistency across provider implementations. + +## Using Google Gemini API + +To use the Google Gemini API with `aisuite`, follow these steps: + +### Prerequisites + +1. **Google Cloud Account**: Ensure you have a Google Cloud account. If not, create one at [Google Cloud](https://cloud.google.com/). +2. **API Key**: Obtain an API key for the Google Gemini API. You can generate an API key from the [Google Cloud Console](https://console.cloud.google.com/).
+ +### Installation + +Install the `google-genai` Python client: + +Example with pip: +```shell +pip install google-genai +``` + +Example with poetry: +```shell +poetry add google-genai +``` + +### Configuration + +Set the `GEMINI_API_KEY` environment variable with your API key: + +```shell +export GEMINI_API_KEY="your-gemini-api-key" +``` + +### Create a Chat Completion + +In your code: +```python +import aisuite as ai +client = ai.Client() + +provider = "google_genai" +model_id = "gemini-2.0-flash-exp" + +messages = [ + {"role": "system", "content": "You are a helpful assistant."}, + {"role": "user", "content": "What’s the weather like in San Francisco?"}, +] + +response = client.chat.completions.create( + model=f"{provider}:{model_id}", + messages=messages, +) + +print(response.choices[0].message.content) +``` + +Happy coding! If you would like to contribute, please read our [Contributing Guide](https://github.com/andrewyng/aisuite/blob/main/CONTRIBUTING.md). diff --git a/aisuite/providers/google_genai_provider.py b/aisuite/providers/google_genai_provider.py new file mode 100644 index 00000000..a3618b34 --- /dev/null +++ b/aisuite/providers/google_genai_provider.py @@ -0,0 +1,49 @@ +import os +from google import genai +from google.genai import types +from aisuite.provider import Provider, LLMError +from aisuite.framework import ChatCompletionResponse + + +class GoogleGenaiProvider(Provider): + def __init__(self, **config): + self.api_key = config.get("api_key") or os.getenv("GEMINI_API_KEY") + if not self.api_key: + raise ValueError( + "Gemini API key is missing. Please provide it in the config or set the GEMINI_API_KEY environment variable." 
+ ) + self.client = genai.Client(api_key=self.api_key) + + def chat_completions_create(self, model, messages, **kwargs): + try: + response = self.client.models.generate_content( + model=model, + contents=[message["content"] for message in messages], + **kwargs + ) + return self.normalize_response(response) + except Exception as e: + raise LLMError(f"Error in chat_completions_create: {str(e)}") + + def generate_content(self, model, contents, **kwargs): + try: + response = self.client.models.generate_content( + model=model, + contents=contents, + **kwargs + ) + return self.normalize_response(response) + except Exception as e: + raise LLMError(f"Error in generate_content: {str(e)}") + + def list_models(self): + try: + response = self.client.models.list() + return [model.name for model in response] + except Exception as e: + raise LLMError(f"Error in list_models: {str(e)}") + + def normalize_response(self, response): + normalized_response = ChatCompletionResponse() + normalized_response.choices[0].message.content = response.text + return normalized_response diff --git a/guides/google_genai.md b/guides/google_genai.md new file mode 100644 index 00000000..107ecfd6 --- /dev/null +++ b/guides/google_genai.md @@ -0,0 +1,55 @@ +# Google Gemini API + +To use the Google Gemini API with `aisuite`, follow these steps: + +## Prerequisites + +1. **Google Cloud Account**: Ensure you have a Google Cloud account. If not, create one at [Google Cloud](https://cloud.google.com/). +2. **API Key**: Obtain an API key for the Google Gemini API. You can generate an API key from the [Google Cloud Console](https://console.cloud.google.com/). 
+ +## Installation + +Install the `google-genai` Python client: + +Example with pip: +```shell +pip install google-genai +``` + +Example with poetry: +```shell +poetry add google-genai +``` + +## Configuration + +Set the `GEMINI_API_KEY` environment variable with your API key: + +```shell +export GEMINI_API_KEY="your-gemini-api-key" +``` + +## Create a Chat Completion + +In your code: +```python +import aisuite as ai +client = ai.Client() + +provider = "google_genai" +model_id = "gemini-2.0-flash-exp" + +messages = [ + {"role": "system", "content": "You are a helpful assistant."}, + {"role": "user", "content": "What’s the weather like in San Francisco?"}, +] + +response = client.chat.completions.create( + model=f"{provider}:{model_id}", + messages=messages, +) + +print(response.choices[0].message.content) +``` + +Happy coding! If you would like to contribute, please read our [Contributing Guide](../CONTRIBUTING.md). diff --git a/pyproject.toml b/pyproject.toml index e96e6216..659c6ef6 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -15,6 +15,7 @@ groq = { version = "^0.9.0", optional = true } mistralai = { version = "^1.0.3", optional = true } openai = { version = "^1.35.8", optional = true } ibm-watsonx-ai = { version = "^1.1.16", optional = true } +google-genai = { version = "^0.1.0", optional = true } # Optional dependencies for different providers httpx = "~0.27.0" @@ -30,7 +31,8 @@ mistral = ["mistralai"] ollama = [] openai = ["openai"] watsonx = ["ibm-watsonx-ai"] -all = ["anthropic", "aws", "google", "groq", "mistral", "openai", "cohere", "watsonx"] # To install all providers +google_genai = ["google-genai"] +all = ["anthropic", "aws", "google", "groq", "mistral", "openai", "cohere", "watsonx", "google_genai"] # To install all providers [tool.poetry.group.dev.dependencies] pre-commit = "^3.7.1"