diff --git a/docs.json b/docs.json new file mode 100644 index 0000000..257469a --- /dev/null +++ b/docs.json @@ -0,0 +1,149 @@ +{ + "$schema": "https://mintlify.com/docs.json", + "theme": "mint", + "name": "Potpie", + "colors": { + "primary": "#0D9373", + "light": "#07C983", + "dark": "#0D9373" + }, + "favicon": "/favicon.svg", + "navigation": { + "tabs": [ + { + "tab": "Documentation", + "groups": [ + { + "group": "Get Started", + "pages": [ + "introduction", + "quickstart" + ] + }, + { + "group": "How to use Agents", + "pages": [ + "agents/introduction", + "agents/debugging-agent", + "agents/qna-agent", + "agents/integration-test-agent", + "agents/unit-test-agent", + "agents/code-changes-agent" + ] + }, + { + "group": "API Access ", + "pages": [ + "agents/api-access" + ] + }, + { + "group": "Custom Agents", + "pages": [ + "custom-agents/introduction", + "custom-agents/configuration", + { + "group": "Tools", + "pages": [ + "custom-agents/tools", + "custom-agents/tools/get_code_from_probable_node_name", + "custom-agents/tools/get_code_from_node_id", + "custom-agents/tools/get_code_from_multiple_node_ids", + "custom-agents/tools/ask_knowledge_graph_queries", + "custom-agents/tools/get_nodes_from_tags", + "custom-agents/tools/get_code_from_node_name", + "custom-agents/tools/get_code_graph_from_node_id", + "custom-agents/tools/get_code_graph_from_node_name", + "custom-agents/tools/change_detection" + ] + } + ] + } + ] + }, + { + "tab": "Open Source", + "openapi": { + "source": "https://production-api.potpie.ai/openapi.json", + "directory": "open-source" + }, + "groups": [ + { + "group": "Open Source", + "pages": [ + "open-source/setup", + "open-source/getting-started", + "open-source/llms", + "open-source/ollama-integration/ollama" + ] + }, + { + "group": "Partners", + "pages": [ + { + "group": "AgentOps-AI", + "pages": [ + "open-source/agentops/agentstack-qna", + "open-source/agentops/agentstack-lld", + "open-source/agentops/agentstack-unittest" + ] + }, + { + 
"group": "CrewAIInc", + "pages": [ + "open-source/crew-ai/crewai-qna", + "open-source/crew-ai/crewai-lld", + "open-source/crew-ai/crewai-unittest" + ] + } + ] + } + ] + } + ], + "global": { + "anchors": [ + { + "anchor": "Documentation", + "href": "https://docs.potpie.ai", + "icon": "book-open-cover" + }, + { + "anchor": "Community", + "href": "https://discord.gg/ryk5CMD5v6", + "icon": "discord" + }, + { + "anchor": "Blog", + "href": "https://www.potpie.ai/blog", + "icon": "newspaper" + } + ] + } + }, + "logo": { + "light": "/logo/light.png", + "dark": "/logo/dark.png" + }, + "navbar": { + "links": [ + { + "label": "Support", + "href": "mailto:hi@potpie.ai" + } + ], + "primary": { + "type": "button", + "label": "Star us on GitHub ⭐️", + "href": "https://github.com/potpie-ai/potpie" + } + }, + "footer": { + "socials": { + "x": "https://x.com/potpiedotai", + "github": "https://github.com/potpie-ai/potpie", + "linkedin": "https://www.linkedin.com/company/potpieai", + "discord": "https://discord.gg/ryk5CMD5v6" + } + } +} \ No newline at end of file diff --git a/open-source/llms.mdx b/open-source/llms.mdx new file mode 100644 index 0000000..1a39d19 --- /dev/null +++ b/open-source/llms.mdx @@ -0,0 +1,77 @@ +--- +title: "LLMs" +description: 'Setup for multiple LLMs' +--- + + +Potpie is designed to work with various large language models (LLMs). This documentation outlines the types of models supported by Potpie and how to configure them. + +## Supported Models + +Potpie supports a variety of Large Language Models (LLMs). Below is a list of the supported models, categorized by provider and size: + +--- + +### 1. **OpenAI Models** + +- **Small Model:** + `openai/gpt-4o-mini` + +- **Large Model:** + `openai/gpt-4o` + +--- + +### 2. **Anthropic Models** + +- **Small Model:** + `anthropic/claude-3-5-haiku-20241022` + +- **Large Model:** + `anthropic/claude-3-7-sonnet-20250219` + +--- + +### 3. 
**DeepSeek Models** + +- **Small Model:** + `openrouter/deepseek/deepseek-chat` + +- **Large Model:** + `openrouter/deepseek/deepseek-chat` + +--- + +### 4. **Meta Llama Models** + +- **Small Model:** + `openrouter/meta-llama/llama-3.3-70b-instruct` + +- **Large Model:** + `openrouter/meta-llama/llama-3.3-70b-instruct` + +--- + +### 5. **Google Gemini Models** + +- **Small Model:** + `openrouter/google/gemini-2.0-flash-001` + +- **Large Model:** + `openrouter/google/gemini-2.0-flash-001` + +--- + +## Configuration + +### Setting Up API Keys + +Before using any model, ensure that the appropriate API keys are set up. Potpie checks for API keys in the following order: + +1. **Environment Variables:** + - `LLM_API_KEY` + - `OPENAI_API_KEY` + - `{PROVIDER}_API_KEY` (e.g., `ANTHROPIC_API_KEY`) + +2. **Secret Manager:** + Potpie can retrieve API keys from a secret management service. Ensure that the keys are stored correctly for the user. diff --git a/open-source/ollama-integration/ollama.mdx b/open-source/ollama-integration/ollama.mdx new file mode 100644 index 0000000..c3db8e0 --- /dev/null +++ b/open-source/ollama-integration/ollama.mdx @@ -0,0 +1,74 @@ +--- +title: "Using Potpie with Ollama Models" +description: 'Configure and run Potpie with Ollama models for local use.' +--- + +# Running Potpie with Ollama Models + +In this guide, you'll learn how to configure and run Potpie with Ollama models on your local machine. + +## Step 1: Install Ollama + +Before using Ollama models with Potpie, you need to install the **Ollama CLI** tool. Ollama allows you to run models locally, and you can install it with the following commands: + +### Installation: + +1. Open a terminal and run the following command to download and install Ollama: + ```bash + curl -fsSL https://ollama.com/install.sh | sh + ``` +2. 
Once installed, verify the installation by running: + ```bash + ollama --version + ``` + +## Step 2: Set Up Ollama Models + +### Step 2.1: Pull the Required Models + +To run Potpie with Ollama, you need to download the models you plan to use. In this guide, we pull two models commonly used for low and high reasoning tasks: + +- **Low Reasoning Model:** Used for generating the knowledge graph. +- **High Reasoning Model:** Used for agent reasoning. + +Run the following commands to pull the models: + +```bash +# Pull the low reasoning model (used for knowledge graph generation) +ollama pull qwen2.5-coder:7b + +# Pull the high reasoning model (used for agent reasoning) +ollama pull qwen2.5-coder:7b +``` + +Note that `ollama pull` uses Ollama's own `model:tag` names (e.g. `qwen2.5-coder:7b`), while the model names you later set in your `.env` configuration should be in the `provider/model_name` format expected by Litellm (e.g. `ollama_chat/qwen2.5-coder:7b`). For more model options and details, refer to the [Litellm documentation](https://docs.litellm.ai/). + +## Step 3: Ollama API Key + +You can retrieve your API Key following these steps: + +1. **Sign in to Ollama**: Go to Ollama's official website and sign in to your account. +2. **Find Your API Key**: Navigate to the [Ollama keys page](https://ollama.com/settings/keys). +3. **Copy the API Key**: You can now copy your API key. + + +## Step 4: Configure Environment Variables + +Once you have installed Ollama and pulled the models, you need to configure your environment to use these models with Potpie. + +Open or create a `.env` file in the directory where you're running Potpie. 
Add the following configuration to specify the Ollama models: + +```bash +# Set the LLM provider to Ollama +LLM_PROVIDER=ollama + +# Set the API key +LLM_API_KEY=PASTE-YOUR-API-KEY-HERE + +# Specify the model used for low reasoning (knowledge graph generation) +LOW_REASONING_MODEL=ollama_chat/qwen2.5-coder:7b + +# Specify the model used for high reasoning (agent reasoning) +HIGH_REASONING_MODEL=ollama_chat/qwen2.5-coder:7b +``` +All Set! Potpie will now use Local Ollama Models. \ No newline at end of file diff --git a/open-source/setup.mdx b/open-source/setup.mdx index 1e2b2af..ed3665b 100644 --- a/open-source/setup.mdx +++ b/open-source/setup.mdx @@ -7,14 +7,25 @@ description: 'Get started with the potpie server setup locally' To set up Firebase, follow these steps: 1. **Create a Firebase Project**: Go to [Firebase Console](https://console.firebase.google.com/) and create a new project. 2. **Generate a Service Account Key**: - - Click on **Project Overview** from the sidebar. + - Click on **Project Overview Gear ⚙** from the sidebar. - Open the **Service Accounts** tab. - Click on the option to generate a new private key in the Firebase Admin SDK sub-section. - Read the warning and generate the key. Rename the downloaded key to `firebase_service_account.json` and move it to the root of the potpie source code. +3. **Create a Firebase App**: + - Go to the **Project Overview Gear ⚙** from the sidebar. + - Create a Firebase app. + - You will find keys for hosting, storage, and other services. Use these keys in your `.env` file. + +--- +# PostHog Integration +PostHog is an open-source platform that helps us analyze user behavior on Potpie. +- **Sign Up**: Create a free account at [PostHog](https://us.posthog.com/signup) and keep your API key in `.env` as `POSTHOG_API_KEY`, and `POSTHOG_HOST`. + --- # Portkey Integration Portkey provides observability and monitoring capabilities for AI integration with Potpie. 
-- **Sign Up**: Create a free account at [Portkey](https://app.portkey.ai/signup) and keep your API key in .env as PORTKEY_API_KEY. +- **Sign Up**: Create a free account at [Portkey](https://app.portkey.ai/signup) and keep your API key in `.env` as `PORTKEY_API_KEY`. + --- # Setting Up GitHub App To enable login via GitHub, create a GitHub app by following these steps: @@ -29,31 +40,114 @@ To enable login via GitHub, create a GitHub app by following these steps: - Webhook: Read Only - **Organization Permissions**: Members : Read Only - **Account Permissions**: Email Address: Read Only -4. **Generate a Private Key**: Download the private key and add it to env under `GITHUB_PRIVATE_KEY`. Add your app ID to `GITHUB_APP_ID`. -5. **Install the App**: From the left sidebar, select **Install App** and install it next to your organization/user account. + - **Homepage URL**: https://potpie.ai + - **Webhook**: Inactive +4. **Generate a Private Key**: Download the private key and place it in the project root. Add your app ID to `GITHUB_APP_ID`. +5. **Format your Private Key**: Use the `format_pem.sh` to format your key: + ```bash + chmod +x format_pem.sh + ./format_pem.sh your-key.pem + ``` + The formatted key will be displayed in the terminal. Copy the formatted key and add it to `.env` under `GITHUB_PRIVATE_KEY`. +6. **Install the App**: From the left sidebar, select **Install App** and install it next to your organization/user account. +7. **Create a GitHub Token**: Go to your GitHub Settings > Developer Settings > Personal Access Tokens > Tokens (classic). Add the token to your `.env` file under `GH_TOKEN_LIST`. + --- # Enabling GitHub Auth on Firebase 1. Open Firebase and navigate to **Authentication**. 2. Enable GitHub sign-in capability by adding a GitHub OAuth app from your account. This will provide you with a client secret and client ID to add to Firebase. 3. Copy the callback URL from Firebase and add it to your GitHub app. + GitHub Auth with Firebase is now ready. 
+ --- # Google Cloud Setup Potpie uses Google Secret Manager to securely manage API keys. If you created a Firebase app, a linked Google Cloud account will be automatically created. You can use that or create a new one as needed. + Follow these steps to set up the Secret Manager and Application Default Credentials (ADC) for Potpie: -1. Set up the Secret Manager. -2. Configure Application Default Credentials for local use. +1. **Install gcloud CLI**: Follow the official installation guide: + https://cloud.google.com/sdk/docs/install + + After installation, initialize the gcloud CLI: + ```bash + gcloud init + ``` + Say yes to configuring a default compute region. Select your local region when prompted. +2. **Set up the gcloud Secret Manager API**. +3. **Configure Application Default Credentials for local use**: + https://cloud.google.com/docs/authentication/set-up-adc-local-dev-environment + Once completed, you are ready to proceed with the Potpie setup. + --- -# Running Potpie +# Running Potpie (Production Setup) 1. **Ensure Docker is Installed**: Verify that Docker is installed and running on your system. 2. **Set Up the Environment**: Create a `.env` file based on the provided `.env.template` in the repository. This file should include all necessary configuration settings for the application. -3. **Google Cloud Authentication**: Log in to your Google Cloud account and set up Application Default Credentials (ADC). Detailed instructions can be found in the documentation. Alternatively place the service account key file for your gcp project in service-account.json file in the root of the codebase. -5. **Run Potpie**: Execute the following command: +3. **Google Cloud Authentication**: Log in to your Google Cloud account and set up Application Default Credentials (ADC). Detailed instructions can be found in the documentation. Alternatively, place the service account key file for your GCP project in the `service-account.json` file in the root of the codebase. +4. 
**Run Potpie**: Execute the following command: ```bash ./start.sh ``` You may need to make it executable by running: ```bash chmod +x start.sh - ``` \ No newline at end of file + ``` + +--- + +## Running Potpie Locally +**Install Python 3.10**: Download and install Python 3.10 from the official Python website: +https://www.python.org/downloads/release/python-3100/ + +1. **Ensure Docker is Installed**: Verify that Docker is installed and running on your system. +2. **Set Up the Environment**: Create a `.env` file based on the provided `.env.template` in the repository. This file should include all necessary configuration settings for the application. + Ensure that: + ``` + isDevelopmentMode=enabled + ENV=development + OPENAI_API_KEY= + ``` +3. **Create a Virtual Environment** using Python 3.10: + ```bash + python3.10 -m venv venv + source venv/bin/activate + ``` + Alternatively, you can also use the `virtualenv` library. + +4. **Install Dependencies** in your venv: + ```bash + pip install -r requirements.txt + ``` + If you face any issues with the dependencies, you can try installing them using the following command: + ```bash + pip install -r requirements.txt --use-deprecated=legacy-resolver + ``` + +5. 
You can use the following environment configuration to run Potpie with local models: + ``` + LLM_PROVIDER=ollama + LLM_API_KEY=ollama + LOW_REASONING_MODEL=ollama_chat/qwen2.5-coder:7b + HIGH_REASONING_MODEL=ollama_chat/qwen2.5-coder:7b + ``` + + To run Potpie with other models, you can use the following environment configuration: + ``` + LLM_PROVIDER=openrouter # any other provider supported by litellm + LLM_API_KEY=sk-or-your-key # your provider key + LOW_REASONING_MODEL=openrouter/deepseek/deepseek-chat # provider model name + HIGH_REASONING_MODEL=openrouter/deepseek/deepseek-chat # provider model name + ``` + + **`LOW_REASONING_MODEL`** and **`HIGH_REASONING_MODEL`** correspond to the models that will be used for generating knowledge graphs and for agent reasoning, respectively. These model names should be in the format of `provider/model_name` as expected by Litellm. For more information, refer to the [Litellm documentation](https://docs.litellm.ai/docs/providers). + +6. **Run Potpie**: Execute the following command: + ```bash + ./start.sh + ``` + You may need to make it executable by running: + ```bash + chmod +x start.sh + ``` + +7. Start using Potpie with your local codebases!