1 change: 1 addition & 0 deletions dabgent/Cargo.lock

Some generated files are not rendered by default.

1 change: 1 addition & 0 deletions dabgent/dabgent_agent/Cargo.toml
@@ -6,6 +6,7 @@ edition = "2024"
 [dependencies]
 tokio = { version = "1", features = ["full"] }
 serde = { version = "1", features = ["derive"] }
+async-trait = "0.1"
 eyre = "0.6"
 chrono = { version = "0.4", features = ["serde"] }
 serde_json = "1"
3 changes: 2 additions & 1 deletion dabgent/dabgent_agent/examples/basic.rs
@@ -40,9 +40,10 @@ pub async fn pipeline_fn(stream_id: &str, store: impl EventStore) -> Result<()>
         store.clone(),
     );
     let tool_processor = ToolProcessor::new(sandbox.boxed(), store.clone(), tools, None);
+    let completion_processor = dabgent_agent::processor::CompletionProcessor::new(store.clone());
     let pipeline = Pipeline::new(
         store.clone(),
-        vec![thread_processor.boxed(), tool_processor.boxed()],
+        vec![thread_processor.boxed(), tool_processor.boxed(), completion_processor.boxed()],
     );
     pipeline.run(stream_id.clone()).await?;
     Ok(())
68 changes: 12 additions & 56 deletions dabgent/dabgent_agent/examples/databricks_explorer.rs
@@ -1,7 +1,8 @@
-use dabgent_agent::processor::{CompactProcessor, Pipeline, Processor, ThreadProcessor, ToolProcessor};
+use dabgent_agent::processor::{DelegationProcessor, Pipeline, Processor, ThreadProcessor, ToolProcessor};
+use dabgent_agent::processor::delegation::compaction::CompactionHandler;
 use dabgent_agent::toolbox::{databricks::databricks_toolset, ToolDyn};
 use dabgent_mq::{EventStore, create_store, StoreConfig};
-use dabgent_sandbox::Sandbox;
+use dabgent_sandbox::{Sandbox, NoOpSandbox};
 use eyre::Result;
 use rig::client::ProviderClient;

@@ -53,23 +54,28 @@ async fn main() -> Result<()> {
     // Set up processors
     let thread_processor = ThreadProcessor::new(llm, store.clone());
     let tool_processor = ToolProcessor::new(
-        DummySandbox::new().boxed(),
+        NoOpSandbox::new().boxed(),
         store.clone(),
         tools,
         None,
     );
-    let compact_processor = CompactProcessor::new(
+
+    // Set up delegation processor with compaction handler
+    let compaction_handler = CompactionHandler::new(2048)?; // Compact threshold
+    let delegation_processor = DelegationProcessor::new(
         store.clone(),
-        2048, // Compact threshold - keep context manageable
         MODEL.to_string(),
+        vec![Box::new(compaction_handler)],
     );
 
+    let completion_processor = dabgent_agent::processor::CompletionProcessor::new(store.clone());
     let pipeline = Pipeline::new(
         store,
         vec![
             thread_processor.boxed(),
             tool_processor.boxed(),
-            compact_processor.boxed(),
+            completion_processor.boxed(),
+            delegation_processor.boxed(),
         ],
     );
 
@@ -91,56 +97,6 @@
 Please help me locate tables that contain bakery or food sales information. I'm
 Can you explore the Unity Catalog and tell me what bakery-related sales data is available?
 "#;
 
-/// Dummy sandbox for Databricks tools that don't need actual file operations
-struct DummySandbox;
-
-impl DummySandbox {
-    fn new() -> Self {
-        Self
-    }
-}
-
-impl Sandbox for DummySandbox {
-    async fn exec(&mut self, _command: &str) -> Result<dabgent_sandbox::ExecResult> {
-        Ok(dabgent_sandbox::ExecResult {
-            exit_code: 0,
-            stdout: String::new(),
-            stderr: String::new(),
-        })
-    }
-
-    async fn write_file(&mut self, _path: &str, _content: &str) -> Result<()> {
-        Ok(())
-    }
-
-    async fn write_files(&mut self, _files: Vec<(&str, &str)>) -> Result<()> {
-        Ok(())
-    }
-
-    async fn read_file(&self, _path: &str) -> Result<String> {
-        Ok(String::new())
-    }
-
-    async fn delete_file(&mut self, _path: &str) -> Result<()> {
-        Ok(())
-    }
-
-    async fn list_directory(&self, _path: &str) -> Result<Vec<String>> {
-        Ok(Vec::new())
-    }
-
-    async fn set_workdir(&mut self, _path: &str) -> Result<()> {
-        Ok(())
-    }
-
-    async fn export_directory(&self, _container_path: &str, _host_path: &str) -> Result<String> {
-        Ok(String::new())
-    }
-
-    async fn fork(&self) -> Result<DummySandbox> {
-        Ok(DummySandbox)
-    }
-}
 
 async fn push_llm_config<S: EventStore>(
     store: &S,
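Note: the deleted DummySandbox spelled out the full Sandbox trait surface; the example now reuses NoOpSandbox from dabgent_sandbox instead. Assuming NoOpSandbox mirrors the deleted no-op semantics (every call succeeds and returns an empty result), a usage sketch:

use dabgent_sandbox::{NoOpSandbox, Sandbox};

async fn smoke_test() -> eyre::Result<()> {
    let mut sandbox = NoOpSandbox::new();
    let result = sandbox.exec("echo hello").await?;
    assert_eq!(result.exit_code, 0); // no command actually runs
    assert!(result.stdout.is_empty());
    Ok(())
}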
6 changes: 4 additions & 2 deletions dabgent/dabgent_agent/examples/planning.rs
@@ -95,9 +95,10 @@ pub async fn planning_pipeline(stream_id: &str, store: impl EventStore + Clone,
         Some("planner".to_string()),
     );
 
+    let planning_completion_processor = dabgent_agent::processor::CompletionProcessor::new(store.clone());
     let planning_pipeline = Pipeline::new(
         store.clone(),
-        vec![planning_thread.boxed(), planning_tool_processor.boxed()],
+        vec![planning_thread.boxed(), planning_tool_processor.boxed(), planning_completion_processor.boxed()],
     );
 
     let pipeline_handle = tokio::spawn({
@@ -155,9 +156,10 @@ pub async fn planning_pipeline(stream_id: &str, store: impl EventStore + Clone,
         None,
     );
 
+    let execution_completion_processor = dabgent_agent::processor::CompletionProcessor::new(store.clone());
     let execution_pipeline = Pipeline::new(
         store.clone(),
-        vec![execution_thread.boxed(), execution_tool_processor.boxed()],
+        vec![execution_thread.boxed(), execution_tool_processor.boxed(), execution_completion_processor.boxed()],
     );
 
     let exec_handle = tokio::spawn({
18 changes: 17 additions & 1 deletion dabgent/dabgent_agent/src/event.rs
@@ -11,7 +11,10 @@ pub struct ParentAggregate {
 #[derive(Debug, Clone, Serialize, Deserialize, PartialEq, Eq)]
 pub enum ToolKind {
     Done,
-    Other(String),
+    ExploreDatabricksCatalog,
+    FinishDelegation,
+    CompactError,
+    Regular(String),
 }
 
 #[derive(Debug, Clone, Serialize, Deserialize)]
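Note: Other(String) is renamed to Regular(String) and three dedicated variants are added, so any downstream match on ToolKind must be updated. A minimal sketch of an exhaustive match over the new enum (the describe helper is hypothetical; only the variant names come from this diff):

fn describe(kind: &ToolKind) -> &str {
    match kind {
        ToolKind::Done => "done",
        ToolKind::ExploreDatabricksCatalog => "explore_databricks_catalog",
        ToolKind::FinishDelegation => "finish_delegation",
        ToolKind::CompactError => "compact_error",
        ToolKind::Regular(name) => name.as_str(),
    }
}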
@@ -40,6 +43,7 @@ pub enum Event {
     ArtifactsCollected(HashMap<String, String>),
     TaskCompleted {
         success: bool,
+        summary: String,
     },
     SeedSandboxFromTemplate {
         template_path: String,
@@ -58,6 +62,16 @@
     PlanUpdated {
         tasks: Vec<String>,
     },
+    DelegateWork {
+        agent_type: String,
+        prompt: String,
+        parent_tool_id: String,
+    },
+    WorkComplete {
+        agent_type: String,
+        result: String,
+        parent: ParentAggregate,
+    },
 }
 
 impl dabgent_mq::Event for Event {
@@ -76,6 +90,8 @@
             Event::PipelineShutdown => "pipeline_shutdown",
             Event::PlanCreated { .. } => "plan_created",
             Event::PlanUpdated { .. } => "plan_updated",
+            Event::DelegateWork { .. } => "delegate_work",
+            Event::WorkComplete { .. } => "work_complete",
         }
     }
 }
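Note: the two new events round-trip a delegated task: DelegateWork carries the child prompt plus the parent tool-call id, and WorkComplete returns the result with a ParentAggregate back-reference. A construction sketch with placeholder field values (the "delegate_work" mapping comes from the impl above):

let delegate = Event::DelegateWork {
    agent_type: "explorer".to_string(),
    prompt: "Find bakery-related sales tables".to_string(),
    parent_tool_id: "tool_call_1".to_string(),
};
// Serialized with event type "delegate_work" per the mapping above.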
21 changes: 19 additions & 2 deletions dabgent/dabgent_agent/src/llm.rs
@@ -4,9 +4,9 @@ use std::pin::Pin;
 use std::time::Duration;
 use tokio::time::sleep;
 
-const MAX_COMPLETION_ATTEMPTS: usize = 4;
+const MAX_COMPLETION_ATTEMPTS: usize = 7;
 const BASE_BACKOFF_MS: u64 = 250;
-const MAX_BACKOFF_MS: u64 = 5000;
+const MAX_BACKOFF_MS: u64 = 10000;
 
 fn backoff_delay_ms(attempt: usize) -> u64 {
     let exp = BASE_BACKOFF_MS.saturating_mul(1 << (attempt.saturating_sub(1)));
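Note: the retry budget grows from 4 to 7 attempts and the backoff cap doubles from 5 s to 10 s. Assuming the elided remainder of backoff_delay_ms clamps the exponential value to MAX_BACKOFF_MS, the per-attempt delays work out as sketched here:

// attempt 1 -> 250 ms, 2 -> 500, 3 -> 1000, 4 -> 2000,
// 5 -> 4000, 6 -> 8000, 7 -> 16000 clamped to 10000.
for attempt in 1..=7u64 {
    let exp = 250u64.saturating_mul(1 << (attempt - 1));
    println!("attempt {attempt}: {} ms", exp.min(10_000));
}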
@@ -178,6 +178,23 @@ impl<C: LLMClient> RetryingLLM<C> {
 
 impl<C: LLMClient> LLMClient for RetryingLLM<C> {
     async fn completion(&self, completion: Completion) -> eyre::Result<CompletionResponse> {
+        // Log payload information before making LLM call
+        let history_size = completion.history.len();
+        let tools_count = completion.tools.len();
+        let prompt_size = serde_json::to_string(&completion.prompt).map(|s| s.len()).unwrap_or(0);
+        let total_history_size = completion.history.iter()
+            .map(|m| serde_json::to_string(m).map(|s| s.len()).unwrap_or(0))
+            .sum::<usize>();
+
+        tracing::info!(
+            model = %completion.model,
+            history_messages = history_size,
+            tools_count = tools_count,
+            prompt_size_bytes = prompt_size,
+            total_history_size_bytes = total_history_size,
+            "Starting LLM completion request"
+        );
+
         for attempt in 1..=self.max_attempts {
             match self.inner.completion(completion.clone()).await {
                 Ok(resp) => return Ok(resp),
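Note: the new tracing::info! call only produces output if the binary installs a subscriber. A minimal sketch, assuming the tracing-subscriber crate (not part of this diff):

fn main() {
    // Install a subscriber so the payload-size log emitted before each
    // completion attempt is actually printed.
    tracing_subscriber::fmt()
        .with_max_level(tracing::Level::INFO)
        .init();
}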