diff --git a/examples/agents/report-generator-agent/.env.baseai.example b/examples/agents/report-generator-agent/.env.baseai.example
new file mode 100644
index 00000000..8c643651
--- /dev/null
+++ b/examples/agents/report-generator-agent/.env.baseai.example
@@ -0,0 +1,21 @@
+# !! SERVER SIDE ONLY !!
+# Keep all your API keys secret — use only on the server side.
+
+# TODO: ADD: Both in your production and local env files.
+# Langbase API key for your User or Org account.
+# How to get this API key https://langbase.com/docs/api-reference/api-keys
+LANGBASE_API_KEY=
+
+# TODO: ADD: LOCAL ONLY. Add only to local env files.
+# The following keys are needed for local pipe runs, for the providers you are using.
+# For Langbase, please add the key to your LLM keysets.
+# Read more: Langbase LLM Keysets https://langbase.com/docs/features/keysets
+OPENAI_API_KEY=
+ANTHROPIC_API_KEY=
+COHERE_API_KEY=
+FIREWORKS_API_KEY=
+GOOGLE_API_KEY=
+GROQ_API_KEY=
+MISTRAL_API_KEY=
+PERPLEXITY_API_KEY=
+TOGETHER_API_KEY=
diff --git a/examples/agents/report-generator-agent/.gitignore b/examples/agents/report-generator-agent/.gitignore
new file mode 100644
index 00000000..7270a2e7
--- /dev/null
+++ b/examples/agents/report-generator-agent/.gitignore
@@ -0,0 +1,6 @@
+# baseai
+**/.baseai/
+node_modules
+.env
+package-lock.json
+pnpm-lock.yaml
diff --git a/examples/agents/report-generator-agent/README.md b/examples/agents/report-generator-agent/README.md
new file mode 100644
index 00000000..5d53d3e6
--- /dev/null
+++ b/examples/agents/report-generator-agent/README.md
@@ -0,0 +1,53 @@
+![Report Generator Agent by ⌘ BaseAI][cover]
+
+![License: MIT][mit] [![Fork on ⌘ Langbase][fork]][pipe]
+
+## Build a Report Generator Agent with BaseAI framework — ⌘ Langbase
+
+This AI Agent is built using the BaseAI framework. It leverages an agentic pipe that integrates 30+ LLMs (including OpenAI, Gemini, Mistral, Llama, Gemma, etc.) and can handle any data, with context sizes of 10M+ tokens, supported by memory. The framework is compatible with any front-end framework (such as React, Remix, Astro, Next.js), giving you, as a developer, the freedom to tailor your AI application exactly as you envision.
+
+## Features
+
+- Report Generator Agent — Built with [BaseAI framework and agentic Pipe ⌘ ][qs].
+- Composable Agents — Build and compose agents with BaseAI.
+- Add and sync a deployed pipe from Langbase locally with `npx baseai@latest add` ([see the Code button][pipe]).
+
+## Learn more
+
+1. Check the [Learning path to build an agentic AI pipe with ⌘ BaseAI][learn]
+2. Read the [source code on GitHub][gh] for this agent example
+3. Go through the documentation: [Pipe Quick Start][qs]
+4. Learn more about [Memory features in ⌘ BaseAI][memory]
+5. Learn more about [Tool calls support in ⌘ BaseAI][toolcalls]
+
+
+> NOTE:
+> This is a BaseAI project; you can deploy BaseAI pipes, memory, and tool calls on Langbase.
+
+---
+
+## Authors
+
+This project was created by [Langbase][lb] team members, with contributions from:
+
+- Muhammad-Ali Danish - Software Engineer, [Langbase][lb] <br>
+**_Built by ⌘ [Langbase.com][lb] — Ship hyper-personalized AI assistants with memory!_**
+
+
+[lb]: https://langbase.com
+[pipe]: https://langbase.com/examples/report-generator-agent
+[gh]: https://github.com/LangbaseInc/baseai/tree/main/examples/agents/report-generator-agent
+[cover]:https://raw.githubusercontent.com/LangbaseInc/docs-images/main/baseai/baseai-cover.png
+[download]:https://download-directory.github.io/?url=https://github.com/LangbaseInc/baseai/tree/main/examples/agents/report-generator-agent
+[learn]:https://baseai.dev/learn
+[memory]:https://baseai.dev/docs/memory/quickstart
+[toolcalls]:https://baseai.dev/docs/tools/quickstart
+[deploy]:https://baseai.dev/docs/deployment/authentication
+[signup]: https://langbase.fyi/io
+[qs]:https://baseai.dev/docs/pipe/quickstart
+[docs]:https://baseai.dev/docs
+[xaa]:https://x.com/MrAhmadAwais
+[xab]:https://x.com/AhmadBilalDev
+[local]:http://localhost:9000
+[mit]: https://img.shields.io/badge/license-MIT-blue.svg?style=for-the-badge&color=%23000000
+[fork]: https://img.shields.io/badge/FORK%20ON-%E2%8C%98%20Langbase-000000.svg?style=for-the-badge&logo=%E2%8C%98%20Langbase&logoColor=000000
\ No newline at end of file
diff --git a/examples/agents/report-generator-agent/baseai/baseai.config.ts b/examples/agents/report-generator-agent/baseai/baseai.config.ts
new file mode 100644
index 00000000..3c0328bc
--- /dev/null
+++ b/examples/agents/report-generator-agent/baseai/baseai.config.ts
@@ -0,0 +1,18 @@
+import type { BaseAIConfig } from 'baseai';
+
+export const config: BaseAIConfig = {
+	log: {
+		isEnabled: false,
+		logSensitiveData: false,
+		pipe: true,
+		'pipe.completion': true,
+		'pipe.request': true,
+		'pipe.response': true,
+		tool: true,
+		memory: true
+	},
+	memory: {
+		useLocalEmbeddings: false
+	},
+	envFilePath: '.env'
+};
diff --git a/examples/agents/report-generator-agent/baseai/memory/report-analysis/index.ts b/examples/agents/report-generator-agent/baseai/memory/report-analysis/index.ts
new file mode 100644
index 00000000..8b4519d2
--- /dev/null
+++ b/examples/agents/report-generator-agent/baseai/memory/report-analysis/index.ts
@@ -0,0 +1,8 @@
+import { MemoryI } from '@baseai/core';
+
+const memoryReportAnalysis = (): MemoryI => ({
+	name: 'report-analysis',
+	description: 'Memory used by the report generator agent',
+});
+
+export default memoryReportAnalysis;
diff --git a/examples/agents/report-generator-agent/baseai/pipes/report-generator-agent.ts b/examples/agents/report-generator-agent/baseai/pipes/report-generator-agent.ts
new file mode 100644
index 00000000..83b17d4c
--- /dev/null
+++ b/examples/agents/report-generator-agent/baseai/pipes/report-generator-agent.ts
@@ -0,0 +1,45 @@
+import { PipeI } from '@baseai/core';
+import memoryReportAnalysis from '../memory/report-analysis';
+
+const pipeReportGeneratorAgent = (): PipeI => ({
+	// Replace with your API key https://langbase.com/docs/api-reference/api-keys
+	apiKey: process.env.LANGBASE_API_KEY!,
+	name: 'report-generator-agent',
+	description:
+		'Report generator agent that parses different aspects of an attached report with the help of Langbase Pipes.',
+	status: 'private',
+	model: 'openai:gpt-4o-mini',
+	stream: true,
+	json: false,
+	store: true,
+	moderate: true,
+	top_p: 1,
+	max_tokens: 3000,
+	temperature: 0.7,
+	presence_penalty: 1,
+	frequency_penalty: 1,
+	stop: [],
+	tool_choice: 'auto',
+	parallel_tool_calls: true,
+	messages: [
+		{
+			role: 'system',
+			content: "You are a report generator AI assistant and agent that generates a concise report based on the user's requested topic from the CONTEXT. If the requested topic is found in the CONTEXT, reply to the user about that topic; otherwise, reply with a 3-sentence summary of what the attached document in the CONTEXT is about and politely ask the user whether they want to generate a concise report based on the summary presented.\n\nBased on the content you have, classify the attached document: if it is about sales and marketing, follow the \"Guidelines on report generation based on Sales and Marketing document\"; if it is a white paper or research paper, follow the \"Guidelines on report generation based on Research paper\". For any other document theme, apply guidelines similar to the \"Guidelines on report generation based on Research paper\".\n\nGuidelines on report generation based on Research paper:\n- Give insights into the key topics taken from the summary.\n- From the extract, pick important keywords that have a strong relationship to the topic domain and present the insights clearly and concisely in bullet points.\n- Based on the above information presented to the user, provide future directions and recommendations.\n\nGuidelines on report generation based on Sales and Marketing document:\n- Give insights into the key topics taken from the summary.\n- From the extract, pick important keywords that have a strong relationship to the topic domain and present the insights on sales numbers and market data clearly and concisely in bullet points.\n- Based on the above information presented to the user, provide a sales prediction based on market strategy; if this is not applicable, provide data-driven insights."
+
+		},
+		{ name: 'json', role: 'system', content: '' },
+		{ name: 'safety', role: 'system', content: '' },
+		{
+			name: 'opening',
+			role: 'system',
+			content: 'Welcome to Langbase. Prompt away!'
+		},
+		{ name: 'rag', role: 'system', content: "Below is some CONTEXT for you to answer the questions. ONLY answer from the CONTEXT. CONTEXT consists of multiple information chunks. Each chunk has a source mentioned at the end.\n\nFor each piece of response you provide, cite the source in brackets like so: [1].\n\nAt the end of the answer, always list each source with its corresponding number and provide the document name, like so: [1] Filename.doc.\n\nIf you don't know the answer, just say that you don't know. Ask for more context and better questions if needed." }
+
+	],
+	variables: [],
+	memory: [memoryReportAnalysis()],
+	tools: []
+});
+
+export default pipeReportGeneratorAgent;
diff --git a/examples/agents/report-generator-agent/index.ts b/examples/agents/report-generator-agent/index.ts
new file mode 100644
index 00000000..b658dcc6
--- /dev/null
+++ b/examples/agents/report-generator-agent/index.ts
@@ -0,0 +1,57 @@
+import 'dotenv/config';
+import { Pipe } from '@baseai/core';
+import inquirer from 'inquirer';
+import ora from 'ora';
+import chalk from 'chalk';
+import pipeReportGeneratorAgent from './baseai/pipes/report-generator-agent';
+
+const pipe = new Pipe(pipeReportGeneratorAgent());
+
+async function main() {
+
+	const initialSpinner = ora('Checking attached content type...').start();
+	try {
+		const { completion: initialReportAgentResponse } = await pipe.run({
+			messages: [{ role: 'user', content: 'Summarize the content in 3 lines and present 10 important keywords ' +
+				'present in the attached document found in the CONTEXT' }],
+		});
+		initialSpinner.stop();
+		console.log(chalk.cyan('Report Generator Agent response...'));
+		console.log(initialReportAgentResponse);
+	} catch (error) {
+		initialSpinner.stop();
+		console.error(chalk.red('Error processing initial request:'), error);
+	}
+
+	while (true) {
+		const { userMsg } = await inquirer.prompt([
+			{
+				type: 'input',
+				name: 'userMsg',
+				message: chalk.blue('Enter your query (or type "exit" to quit):'),
+			},
+		]);
+
+		if (userMsg.toLowerCase() === 'exit') {
+			console.log(chalk.green('Goodbye!'));
+			break;
+		}
+
+		const spinner = ora('Processing your request...').start();
+
+		try {
+			const { completion: reportAgentResponse } = await pipe.run({
+				messages: [{ role: 'user', content: userMsg }],
+			});
+
+			spinner.stop();
+			console.log(chalk.cyan('Agent:'));
+			console.log(reportAgentResponse);
+		} catch (error) {
+			spinner.stop();
+			console.error(chalk.red('Error processing your request:'), error);
+		}
+	}
+}
+
+main();
\ No newline at end of file
diff --git a/examples/agents/report-generator-agent/package.json b/examples/agents/report-generator-agent/package.json
new file mode 100644
index 00000000..8fe65dec
--- /dev/null
+++ b/examples/agents/report-generator-agent/package.json
@@ -0,0 +1,22 @@
+{
+  "name": "report-generator-agent",
+  "version": "1.0.0",
+  "main": "index.js",
+  "scripts": {
+    "test": "echo \"Error: no test specified\" && exit 1",
+    "baseai": "baseai"
+  },
+  "keywords": [],
+  "author": "",
+  "license": "ISC",
+  "description": "",
+  "dependencies": {
+    "@baseai/core": "^0.9.3",
+    "dotenv": "^16.4.5",
+    "inquirer": "^12.0.0",
+    "ora": "^8.1.0"
+  },
+  "devDependencies": {
+    "baseai": "^0.9.3"
+  }
+}
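
Note on streaming: the pipe is configured with `stream: true`, yet `index.ts` only reads the buffered `completion` returned by `pipe.run()`. The sketch below shows one possible streaming variant for token-by-token terminal output. It is a minimal sketch, not part of the diff above: it assumes the `getRunner` helper and event-based runner that `@baseai/core` documents for streaming, and the file name `stream.ts` is purely illustrative. As with `index.ts`, it presumes the usual local setup (for example, `npx baseai@latest dev` running and the `report-analysis` memory embedded) or a deployed pipe reachable with `LANGBASE_API_KEY`.

// stream.ts: a minimal streaming sketch (illustrative, assumes the documented @baseai/core streaming API).
import 'dotenv/config';
import { Pipe, getRunner } from '@baseai/core';
import pipeReportGeneratorAgent from './baseai/pipes/report-generator-agent';

const pipe = new Pipe(pipeReportGeneratorAgent());

async function streamReport(userMsg: string) {
	// Request a stream instead of a buffered completion.
	const { stream } = await pipe.run({
		messages: [{ role: 'user', content: userMsg }],
		stream: true,
	});

	// getRunner wraps the raw stream in an event emitter.
	const runner = getRunner(stream);

	runner.on('content', content => {
		// Print chunks as they arrive.
		process.stdout.write(content);
	});

	runner.on('end', () => process.stdout.write('\n'));
	runner.on('error', error => console.error('Stream error:', error));
}

streamReport('Generate a concise report on the key findings of the attached document.');

The interactive loop in `index.ts` could adopt the same pattern by swapping the `completion` destructuring for the runner shown here.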