diff --git a/QEfficient/generation/cloud_infer.py b/QEfficient/generation/cloud_infer.py
index 71cd61188..a8a7e4b24 100644
--- a/QEfficient/generation/cloud_infer.py
+++ b/QEfficient/generation/cloud_infer.py
@@ -47,7 +47,7 @@ def __init__(
         qpc_path: Union[Path, str],
         device_ids: Optional[List[int]] = None,
         activate: bool = True,
-        enable_debug_logs: bool = False,
+        enable_debug_logs: bool = True,
     ):
         """
         Initialise for QAIC inference Session
diff --git a/QEfficient/generation/text_generation_inference.py b/QEfficient/generation/text_generation_inference.py
index 2dd485a5e..2c86d9ab5 100755
--- a/QEfficient/generation/text_generation_inference.py
+++ b/QEfficient/generation/text_generation_inference.py
@@ -316,7 +316,7 @@ def cloud_ai_100_exec_kv(
     prompts_txt_file_path: Optional[str] = None,
     device_id: Optional[List[int]] = None,
     generation_len: Optional[int] = None,
-    enable_debug_logs: bool = False,
+    enable_debug_logs: bool = True,
     stream: bool = True,
     write_io_dir: Optional[str] = None,
     automation=False,
@@ -408,7 +408,7 @@ def __init__(
         full_batch_size: Optional[int] = None,
         ctx_len: Optional[int] = None,
         device_id: Optional[List[int]] = None,
-        enable_debug_logs: bool = False,
+        enable_debug_logs: bool = True,
         write_io_dir: Optional[str] = None,
         is_tlm: Optional[int] = None,
     ) -> None:
@@ -902,7 +902,7 @@ def __init__(
         full_batch_size: Optional[int] = None,
         ctx_len: Optional[int] = None,
         device_id: Optional[List[int]] = None,
-        enable_debug_logs: bool = False,
+        enable_debug_logs: bool = True,
         write_io_dir: Optional[str] = None,
         is_tlm: bool = False,
     ) -> None:
diff --git a/QEfficient/transformers/models/modeling_auto.py b/QEfficient/transformers/models/modeling_auto.py
index d1fc61cee..fe467b7d7 100644
--- a/QEfficient/transformers/models/modeling_auto.py
+++ b/QEfficient/transformers/models/modeling_auto.py
@@ -1017,7 +1017,7 @@ def cloud_ai_100_generate(
         self,
         inputs: torch.Tensor,
         device_ids: List[int],
-        enable_debug_logs: bool = False,
+        enable_debug_logs: bool = True,
         generation_len: int = None,
         streamer: Optional[TextStreamer] = None,
     ) -> np.ndarray:
diff --git a/examples/cpp_execution/text_inference_using_cpp.py b/examples/cpp_execution/text_inference_using_cpp.py
index eadf5e601..5a2764d7c 100644
--- a/examples/cpp_execution/text_inference_using_cpp.py
+++ b/examples/cpp_execution/text_inference_using_cpp.py
@@ -146,7 +146,7 @@ def cloud_ai_100_exec_kv_cpp(
     prompts_txt_file_path: Optional[str] = None,
     device_id: Optional[List[int]] = None,
     generation_len: Optional[int] = None,
-    enable_debug_logs: bool = False,
+    enable_debug_logs: bool = True,
     stream: bool = True,
     full_batch_size: Optional[int] = None,
 ):