Skip to content

Commit 8b1b688

Browse files
committed
chore: add trading-volume-example for testing
1 parent c81ed2f commit 8b1b688

File tree

21 files changed

+1002
-165
lines changed

21 files changed

+1002
-165
lines changed

Cargo.lock

Lines changed: 4 additions & 2 deletions
Some generated files are not rendered by default. Learn more about customizing how changed files appear on GitHub.

Cargo.toml

Lines changed: 3 additions & 0 deletions
Original file line number | Diff line number | Diff line change
@@ -33,6 +33,9 @@ futures = "0.3.31"
3333
serde_json = "1.0.140"
3434
reqwest = "0.12.15"
3535
tempfile = "3.13"
36+
rand = "0.8"
3637

38+
parity-scale-codec = { version = "3", features = ["derive"] }
3739
pico-sdk = { git = "https://github.com/brevis-network/pico", features = ["coprocessor"] }
3840
pico-vm = { git = "https://github.com/brevis-network/pico" }
41+
coprocessor-sdk = { git = "https://github.com/brevis-network/Pico-zkCoprocessor" }

pico-coprocessor-service-bin/build.rs

Lines changed: 4 additions & 4 deletions
Original file line number | Diff line number | Diff line change
@@ -1,8 +1,8 @@
11
use blueprint_sdk::build;
22
use blueprint_sdk::tangle::blueprint;
3+
use pico_coprocessor_service_blueprint_lib::{generate_coprocessor_proof, generate_proof};
34
use std::path::Path;
45
use std::process;
5-
use pico_coprocessor_service_blueprint_lib::say_hello;
66

77
fn main() {
88
// Automatically update dependencies with `soldeer` (if available), and build the contracts.
@@ -22,7 +22,7 @@ fn main() {
2222
name: "experiment",
2323
master_manager_revision: "Latest",
2424
manager: { Evm = "HelloBlueprint" },
25-
jobs: [say_hello]
25+
jobs: [generate_proof, generate_coprocessor_proof]
2626
};
2727

2828
match blueprint {
@@ -31,12 +31,12 @@ fn main() {
3131
let json = blueprint_sdk::tangle::metadata::macros::ext::serde_json::to_string_pretty(
3232
&blueprint,
3333
)
34-
.unwrap();
34+
.unwrap();
3535
std::fs::write(
3636
Path::new(env!("CARGO_WORKSPACE_DIR")).join("blueprint.json"),
3737
json.as_bytes(),
3838
)
39-
.unwrap();
39+
.unwrap();
4040
}
4141
Err(e) => {
4242
println!("cargo::error={e:?}");
pico-coprocessor-service-bin/src/main.rs

Lines changed: 103 additions & 71 deletions
Original file line number | Diff line number | Diff line change
@@ -1,98 +1,130 @@
1-
use blueprint_sdk::Job;
2-
use blueprint_sdk::Router;
3-
use blueprint_sdk::contexts::tangle::TangleClientContext;
4-
use blueprint_sdk::crypto::sp_core::SpSr25519;
5-
use blueprint_sdk::crypto::tangle_pair_signer::TanglePairSigner;
6-
use blueprint_sdk::keystore::backends::Backend;
7-
use blueprint_sdk::runner::BlueprintRunner;
8-
use blueprint_sdk::runner::config::BlueprintEnvironment;
9-
use blueprint_sdk::runner::tangle::config::TangleConfig;
10-
use blueprint_sdk::tangle::consumer::TangleConsumer;
11-
use blueprint_sdk::tangle::filters::MatchesServiceId;
12-
use blueprint_sdk::tangle::layers::TangleLayer;
13-
use blueprint_sdk::tangle::producer::TangleProducer;
14-
use pico_coprocessor_service_blueprint_lib::{MyContext, SAY_HELLO_JOB_ID, say_hello};
1+
// pico-coprocessor-service-bin/src/main.rs
2+
use blueprint_sdk::{
3+
Job,
4+
Router, // Ensure Job and Router are imported
5+
alloy::primitives::Address, // Import Address
6+
contexts::tangle::TangleClientContext,
7+
crypto::{sp_core::SpSr25519, tangle_pair_signer::TanglePairSigner},
8+
keystore::backends::Backend,
9+
runner::{BlueprintRunner, config::BlueprintEnvironment, tangle::config::TangleConfig},
10+
tangle::{
11+
consumer::TangleConsumer, filters::MatchesServiceId, layers::TangleLayer,
12+
producer::TangleProducer,
13+
},
14+
};
15+
// Import new types and jobs from lib
16+
use pico_coprocessor_service_blueprint_lib::{
17+
GENERATE_COPROCESSOR_PROOF_JOB_ID,
18+
GENERATE_PROOF_JOB_ID,
19+
ServiceContext,
20+
generate_coprocessor_proof,
21+
generate_proof,
22+
say_hello, // Jobs
23+
};
24+
use std::{path::PathBuf, str::FromStr}; // For PathBuf and FromStr
1525
use tower::filter::FilterLayer;
1626
use tracing::error;
1727
use tracing::level_filters::LevelFilter;
28+
use url::Url; // For parsing RPC URL
1829

1930
#[tokio::main]
20-
async fn main() -> Result<(), blueprint_sdk::Error> {
31+
async fn main() -> Result<(), Box<dyn std::error::Error>> {
32+
// Use Box<dyn Error> for broader error handling
2133
setup_log();
34+
tracing::info!("Starting Pico Coprocessor Service Blueprint Runner...");
2235

23-
let env = BlueprintEnvironment::load()?;
36+
// --- Load Configuration ---
37+
let env = BlueprintEnvironment::load()
38+
.map_err(|e| format!("Failed to load blueprint environment: {}", e))?;
39+
40+
// Tangle Signer Setup
2441
let sr25519_signer = env.keystore().first_local::<SpSr25519>()?;
2542
let sr25519_pair = env.keystore().get_secret::<SpSr25519>(&sr25519_signer)?;
26-
let st25519_signer = TanglePairSigner::new(sr25519_pair.0);
43+
let tangle_signer = TanglePairSigner::new(sr25519_pair.0);
44+
tracing::info!("Tangle signer configured.");
2745

46+
// Tangle Client Setup
2847
let tangle_client = env.tangle_client().await?;
2948
let tangle_producer =
3049
TangleProducer::finalized_blocks(tangle_client.rpc_client.clone()).await?;
31-
let tangle_consumer = TangleConsumer::new(tangle_client.rpc_client.clone(), st25519_signer);
32-
33-
let tangle_config = TangleConfig::default();
34-
35-
let service_id = env.protocol_settings.tangle()?.service_id.unwrap();
36-
let result = BlueprintRunner::builder(tangle_config, env)
37-
.router(
38-
// A router
39-
//
40-
// Each "route" is a job ID and the job function. We can also support arbitrary `Service`s from `tower`,
41-
// which may make it easier for people to port over existing services to a blueprint.
42-
Router::new()
43-
// The route defined here has a `TangleLayer`, which adds metadata to the
44-
// produced `JobResult`s, making it visible to a `TangleConsumer`.
45-
.route(SAY_HELLO_JOB_ID, say_hello.layer(TangleLayer))
46-
// Add the `FilterLayer` to filter out job calls that don't match the service ID
47-
//
48-
// This layer is global to the router, and is applied to every job call.
49-
.layer(FilterLayer::new(MatchesServiceId(service_id)))
50-
// We can add a context to the router, which will be passed to all job functions
51-
// that have the `Context` extractor.
52-
//
53-
// A context can be used for global state between job calls, such as a database.
54-
//
55-
// It is important to note that the context is **cloned** for each job call, so
56-
// the context must be cheaply cloneable.
57-
.with_context(MyContext::new()),
58-
)
59-
// Add potentially many producers
60-
//
61-
// A producer is simply a `Stream` that outputs `JobCall`s, which are passed down to the intended
62-
// job functions.
50+
let tangle_consumer = TangleConsumer::new(tangle_client.rpc_client.clone(), tangle_signer);
51+
tracing::info!("Tangle producer and consumer configured.");
52+
53+
let tangle_config = env.protocol_settings.tangle()?.clone(); // Use loaded config
54+
let service_id = tangle_config
55+
.service_id
56+
.ok_or("Tangle Service ID not configured")?;
57+
tracing::info!(%service_id, "Using Tangle Service ID");
58+
59+
// --- Service Specific Configuration ---
60+
// Get these from environment variables or a config file via BlueprintEnvironment extensions
61+
// Example using environment variables (add error handling)
62+
let eth_rpc_env =
63+
std::env::var("ETH_RPC_URL").map_err(|_| "ETH_RPC_URL environment variable not set")?;
64+
let eth_rpc_url =
65+
Url::parse(&eth_rpc_env).map_err(|e| format!("Invalid ETH_RPC_URL: {}", e))?;
66+
67+
let registry_addr_env = std::env::var("REGISTRY_CONTRACT_ADDRESS")
68+
.map_err(|_| "REGISTRY_CONTRACT_ADDRESS environment variable not set")?;
69+
let registry_contract_address = Address::from_str(&registry_addr_env)
70+
.map_err(|e| format!("Invalid REGISTRY_CONTRACT_ADDRESS: {}", e))?;
71+
72+
let temp_dir_base_env =
73+
std::env::var("TEMP_DIR_BASE").unwrap_or_else(|_| "/tmp/pico-service".to_string());
74+
let temp_dir_base = PathBuf::from(temp_dir_base_env);
75+
76+
tracing::info!(rpc_url = %eth_rpc_url, registry = %registry_contract_address, temp_dir = ?temp_dir_base, "Service configuration loaded");
77+
78+
// --- Create Service Context ---
79+
let service_context =
80+
ServiceContext::new(eth_rpc_url, registry_contract_address, temp_dir_base)
81+
.map_err(|e| format!("Failed to create service context: {:?}", e))?;
82+
tracing::info!("Service context created.");
83+
84+
// --- Build Router ---
85+
let router = Router::new()
86+
// Add routes for each job ID
87+
.route(GENERATE_PROOF_JOB_ID, generate_proof.layer(TangleLayer))
88+
.route(
89+
GENERATE_COPROCESSOR_PROOF_JOB_ID,
90+
generate_coprocessor_proof.layer(TangleLayer),
91+
) // Add new route
92+
// Global filter layer
93+
.layer(FilterLayer::new(MatchesServiceId(service_id)))
94+
// Add the shared context
95+
.with_context(service_context);
96+
tracing::info!("Router configured with {} jobs.", 3); // Update count
97+
98+
// --- Build and Run Runner ---
99+
let runner_result = BlueprintRunner::builder(tangle_config, env)
100+
.router(router)
63101
.producer(tangle_producer)
64-
// Add potentially many consumers
65-
//
66-
// A consumer is simply a `Sink` that consumes `JobResult`s, which are the output of the job functions.
67-
// Every result will be passed to every consumer. It is the responsibility of the consumer
68-
// to determine whether or not to process a result.
69102
.consumer(tangle_consumer)
70-
// Custom shutdown handlers
71-
//
72-
// Now users can specify what to do when an error occurs and the runner is shutting down.
73-
// That can be cleanup logic, finalizing database transactions, etc.
74-
.with_shutdown_handler(async { println!("Shutting down!") })
103+
.with_shutdown_handler(async { println!("Shutting down Pico Coprocessor Service!") })
75104
.run()
76105
.await;
77106

78-
if let Err(e) = result {
79-
error!("Runner failed! {e:?}");
107+
if let Err(e) = runner_result {
108+
error!("Blueprint runner failed: {:?}", e);
109+
// Convert specific blueprint errors if needed, otherwise return the boxed error
110+
return Err(e.into());
80111
}
81112

113+
tracing::info!("Blueprint runner finished successfully.");
82114
Ok(())
83115
}
84116

85117
pub fn setup_log() {
86118
use tracing_subscriber::util::SubscriberInitExt;
119+
let filter = tracing_subscriber::EnvFilter::builder()
120+
.with_default_directive(LevelFilter::INFO.into())
121+
.from_env_lossy();
87122

88-
let _ = tracing_subscriber::fmt::SubscriberBuilder::default()
89-
.without_time()
90-
.with_span_events(tracing_subscriber::fmt::format::FmtSpan::NONE)
91-
.with_env_filter(
92-
tracing_subscriber::EnvFilter::builder()
93-
.with_default_directive(LevelFilter::INFO.into())
94-
.from_env_lossy(),
95-
)
96-
.finish()
97-
.try_init();
123+
let _ = tracing_subscriber::fmt() //.SubscriberBuilder::default()
124+
// .without_time() // Keep time for debugging
125+
.with_span_events(tracing_subscriber::fmt::format::FmtSpan::CLOSE) // Show span duration
126+
.with_env_filter(filter)
127+
// .finish() // finish called by init
128+
.try_init(); // Use try_init to avoid panic if already initialized
129+
tracing::info!("Logging initialized.");
98130
}

pico-coprocessor-service-lib/Cargo.toml

Lines changed: 3 additions & 1 deletion
Original file line number | Diff line number | Diff line change
@@ -20,12 +20,14 @@ serde = { workspace = true }
2020
url = { workspace = true }
2121
hex = { workspace = true }
2222
futures = { workspace = true }
23+
coprocessor-sdk = { workspace = true }
2324
pico-sdk = { workspace = true, features = ["coprocessor"] }
2425
pico-vm = { workspace = true }
2526
serde_json = { workspace = true }
2627
reqwest = { workspace = true, features = ["stream"] }
2728
tempfile.workspace = true
28-
rand = "0.8"
29+
rand = { workspace = true }
30+
parity-scale-codec = { workspace = true }
2931

3032
[dev-dependencies]
3133
blueprint-sdk = { workspace = true, features = ["testing", "tangle"] }

0 commit comments

Comments (0)