Skip to content

wip: comment out prepend full_text #3079

New issue

Have a question about this project? Sign up for a free GitHub account to open an issue and contact its maintainers and the community.

By clicking “Sign up for GitHub”, you agree to our terms of service and privacy statement. We’ll occasionally send you account related emails.

Already on GitHub? Sign in to your account

Draft
wants to merge 1 commit into
base: main
Choose a base branch
from
Draft
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension

Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
47 changes: 23 additions & 24 deletions router/src/server.rs
Original file line number Diff line number Diff line change
Expand Up @@ -295,10 +295,10 @@ pub(crate) async fn generate_internal(
);

let compute_characters = req.inputs.chars().count();
let mut add_prompt = None;
if req.parameters.return_full_text.unwrap_or(false) {
add_prompt = Some(req.inputs.clone());
}
//let mut add_prompt = None;
//if req.parameters.return_full_text.unwrap_or(false) {
// add_prompt = Some(req.inputs.clone());
//}

let details: bool = req.parameters.details || req.parameters.decoder_input_details;

Expand All @@ -321,10 +321,10 @@ pub(crate) async fn generate_internal(
.into_iter()
.map(|response: InferResponse| {
// Add prompt if return_full_text
let mut output_text = response.generated_text.text;
if let Some(prompt) = &add_prompt {
output_text = prompt.clone() + &output_text;
}
let output_text = response.generated_text.text.clone();
//if let Some(prompt) = &add_prompt {
// output_text = prompt.clone() + &output_text;
//}

BestOfSequence {
generated_text: output_text,
Expand Down Expand Up @@ -416,10 +416,10 @@ pub(crate) async fn generate_internal(
.record(response.generated_text.generated_tokens as f64);

// Send response
let mut output_text = response.generated_text.text;
if let Some(prompt) = add_prompt {
output_text = prompt + &output_text;
}
let output_text = response.generated_text.text.clone();
//if let Some(prompt) = add_prompt {
// output_text = prompt + &output_text;
//}

tracing::debug!("Output: {}", output_text);
tracing::info!("Success");
Expand Down Expand Up @@ -522,10 +522,10 @@ async fn generate_stream_internal(
let mut end_reached = false;
let mut error = false;

let mut add_prompt = None;
if req.parameters.return_full_text.unwrap_or(false) {
add_prompt = Some(req.inputs.clone());
}
//let mut add_prompt = None;
//if req.parameters.return_full_text.unwrap_or(false) {
// add_prompt = Some(req.inputs.clone());
//}
let details = req.parameters.details;

let best_of = req.parameters.best_of.unwrap_or(1);
Expand Down Expand Up @@ -616,10 +616,10 @@ async fn generate_stream_internal(
// StreamResponse
end_reached = true;

let mut output_text = generated_text.text;
if let Some(prompt) = add_prompt {
output_text = prompt + &output_text;
}
let output_text = generated_text.text;
//if let Some(prompt) = add_prompt {
// output_text = prompt + &output_text;
//}

tracing::debug!(parent: &span, "Output: {}", output_text);
tracing::info!(parent: &span, "Success");
Expand Down Expand Up @@ -1711,10 +1711,9 @@ pub async fn run(

// Shared API builder initialization
let api_builder = || {
let mut builder = ApiBuilder::new().with_progress(false);
if let Some(token) = authorization_token {
builder = builder.with_token(Some(token));
}
let mut builder = ApiBuilder::new()
Copy link
Author

Choose a reason for hiding this comment

The reason will be displayed to describe this comment to others. Learn more.

this chunk will be reverted — sorry it got in here

.with_progress(false)
.with_token(authorization_token);

if let Ok(cache_dir) = std::env::var("HUGGINGFACE_HUB_CACHE") {
builder = builder.with_cache_dir(cache_dir.into());
Expand Down