From 003f3d0284c28324d96ee9af97dc02f7f26ecc3c Mon Sep 17 00:00:00 2001
From: Trevor Sullivan
Date: Tue, 28 Jan 2025 12:56:25 -0700
Subject: [PATCH] =?UTF-8?q?Add=20Microsoft=20Azure=20OpenAI=20setup=20step?=
 =?UTF-8?q?s=20=E2=98=81=EF=B8=8F=20(#1670)?=
MIME-Version: 1.0
Content-Type: text/plain; charset=UTF-8
Content-Transfer-Encoding: 8bit

---
 docker-compose.yml | 19 ++++++++++++++-----
 1 file changed, 14 insertions(+), 5 deletions(-)

diff --git a/docker-compose.yml b/docker-compose.yml
index 6fcb1f086e..ab85d636ec 100644
--- a/docker-compose.yml
+++ b/docker-compose.yml
@@ -41,12 +41,21 @@ services:
       # - ENABLE_ANTHROPIC=true
       # - LLM_KEY=ANTHROPIC_CLAUDE3.5_SONNET
       # - ANTHROPIC_API_KEY=
+      # Microsoft Azure OpenAI support:
+      # If you'd like to use Microsoft Azure OpenAI as your managed LLM service integration with Skyvern, use the environment variables below.
+      # In your Microsoft Azure subscription, you will need to provision the OpenAI service and deploy a model in order to use it.
+      # 1. Log in to the Azure Portal
+      # 2. Create an Azure Resource Group
+      # 3. Create an OpenAI resource in the Resource Group (choose a region and pricing tier)
+      # 4. From the OpenAI resource's Overview page, open the "Azure AI Foundry" portal (click the "Explore Azure AI Foundry Portal" button)
+      # 5. In Azure AI Foundry, click "Shared Resources" --> "Deployments"
+      # 6. Click "Deploy Model" --> "Deploy Base Model" --> select a model (use this model's "Deployment Name" value for the AZURE_DEPLOYMENT variable below)
       # - ENABLE_AZURE=true
-      # - LLM_KEY=AZURE_OPENAI
-      # - AZURE_DEPLOYMENT=
-      # - AZURE_API_KEY=
-      # - AZURE_API_BASE=
-      # - AZURE_API_VERSION=
+      # - LLM_KEY=AZURE_OPENAI # Leave this value static; don't change it
+      # - AZURE_DEPLOYMENT= # Use the OpenAI model "Deployment Name" that you deployed, using the steps above
+      # - AZURE_API_KEY= # Copy and paste Key1 or Key2 from the OpenAI resource in the Azure Portal
+      # - AZURE_API_BASE= # Copy and paste the "Endpoint" from the OpenAI resource in the Azure Portal (e.g. https://xyzxyzxyz.openai.azure.com/)
+      # - AZURE_API_VERSION= # Specify a valid Azure OpenAI data-plane API version (e.g. 2024-08-01-preview). Docs: https://learn.microsoft.com/en-us/azure/ai-services/openai/reference
       # Amazon Bedrock Support:
       # Amazon Bedrock is a managed service that enables you to invoke LLMs and bill them through your AWS account.
       # To use Amazon Bedrock as the LLM provider for Skyvern, specify the following environment variables.
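
For reference, a minimal sketch of how the uncommented environment block in docker-compose.yml might look once the placeholders are filled in. The deployment name gpt-4o is a hypothetical example only; the endpoint host and API version reuse the illustrative values from the comments above, and the key is a placeholder, not a real credential.

    environment:
      - ENABLE_AZURE=true
      - LLM_KEY=AZURE_OPENAI                              # left as-is, per the comment above
      - AZURE_DEPLOYMENT=gpt-4o                           # hypothetical "Deployment Name" from Azure AI Foundry
      - AZURE_API_KEY=<Key1-or-Key2-from-the-Azure-Portal> # placeholder, not a real key
      - AZURE_API_BASE=https://xyzxyzxyz.openai.azure.com/
      - AZURE_API_VERSION=2024-08-01-preview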