Skip to content

fix(client): some minor code fixes... #1176

New issue

Have a question about this project? Sign up for a free GitHub account to open an issue and contact its maintainers and the community.

By clicking “Sign up for GitHub”, you agree to our terms of service and privacy statement. We’ll occasionally send you account related emails.

Already on GitHub? Sign in to your account

Open
wants to merge 1 commit into
base: main
Choose a base branch
from
Open
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension

Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
3 changes: 1 addition & 2 deletions langfuse/api/core/jsonable_encoder.py
Original file line number Diff line number Diff line change
Expand Up @@ -98,8 +98,7 @@ def jsonable_encoder(
try:
data = dict(obj)
except Exception as e:
errors: List[Exception] = []
errors.append(e)
errors: List[Exception] = [e]
try:
data = vars(obj)
except Exception as e:
Expand Down
20 changes: 14 additions & 6 deletions langfuse/client.py
Original file line number Diff line number Diff line change
Expand Up @@ -1298,7 +1298,7 @@ def create_prompt(
name: str,
prompt: List[ChatMessageDict],
is_active: Optional[bool] = None, # deprecated
labels: List[str] = [],
labels: Optional[List[str]] = None,
tags: Optional[List[str]] = None,
type: Optional[Literal["chat"]],
config: Optional[Any] = None,
Expand All @@ -1312,7 +1312,7 @@ def create_prompt(
name: str,
prompt: str,
is_active: Optional[bool] = None, # deprecated
labels: List[str] = [],
labels: Optional[List[str]] = None,
tags: Optional[List[str]] = None,
type: Optional[Literal["text"]] = "text",
config: Optional[Any] = None,
Expand All @@ -1325,7 +1325,7 @@ def create_prompt(
name: str,
prompt: Union[str, List[ChatMessageDict]],
is_active: Optional[bool] = None, # deprecated
labels: List[str] = [],
labels: Optional[List[str]] = None,
tags: Optional[List[str]] = None,
type: Optional[Literal["chat", "text"]] = "text",
config: Optional[Any] = None,
Expand All @@ -1347,6 +1347,8 @@ def create_prompt(
TextPromptClient: The prompt if type argument is 'text'.
ChatPromptClient: The prompt if type argument is 'chat'.
"""
if labels is None:
labels = []
try:
self.log.debug(f"Creating prompt {name=}, {version=}, {labels=}")

Expand Down Expand Up @@ -1401,7 +1403,7 @@ def update_prompt(
*,
name: str,
version: int,
new_labels: List[str] = [],
new_labels: Optional[List[str]] = None,
):
"""Update an existing prompt version in Langfuse. The Langfuse SDK prompt cache is invalidated for all prompts with the specified name.

Expand All @@ -1414,6 +1416,8 @@ def update_prompt(
Prompt: The updated prompt from the Langfuse API.

"""
if new_labels is None:
new_labels = []
updated_prompt = self.client.prompt_version.update(
name=name,
version=version,
Expand Down Expand Up @@ -3424,7 +3428,7 @@ def observe_llama_index(
run_name: str,
run_description: Optional[str] = None,
run_metadata: Optional[Any] = None,
llama_index_integration_constructor_kwargs: Optional[Dict[str, Any]] = {},
llama_index_integration_constructor_kwargs: Optional[Dict[str, Any]] = None,
):
"""Context manager for observing LlamaIndex operations linked to this dataset item.

Expand Down Expand Up @@ -3454,6 +3458,8 @@ def observe_llama_index(
Raises:
ImportError: If required modules for LlamaIndex integration are not available.
"""
if llama_index_integration_constructor_kwargs is None:
llama_index_integration_constructor_kwargs = {}
metadata = {
"dataset_item_id": self.id,
"run_name": run_name,
Expand Down Expand Up @@ -3513,7 +3519,7 @@ def get_llama_index_handler(
run_name: str,
run_description: Optional[str] = None,
run_metadata: Optional[Any] = None,
llama_index_integration_constructor_kwargs: Optional[Dict[str, Any]] = {},
llama_index_integration_constructor_kwargs: Optional[Dict[str, Any]] = None,
):
"""Create and get a llama-index callback handler linked to this dataset item.

Expand All @@ -3526,6 +3532,8 @@ def get_llama_index_handler(
Returns:
LlamaIndexCallbackHandler: An instance of LlamaIndexCallbackHandler linked to the dataset item.
"""
if llama_index_integration_constructor_kwargs is None:
llama_index_integration_constructor_kwargs = {}
metadata = {
"dataset_item_id": self.id,
"run_name": run_name,
Expand Down
8 changes: 6 additions & 2 deletions langfuse/decorators/langfuse_decorator.py
Original file line number Diff line number Diff line change
Expand Up @@ -289,10 +289,12 @@ def _prepare_call(
capture_input: bool,
is_method: bool = False,
func_args: Tuple = (),
func_kwargs: Dict = {},
func_kwargs: Optional[Dict] = None,
) -> Optional[
Union[StatefulSpanClient, StatefulTraceClient, StatefulGenerationClient]
]:
if func_kwargs is None:
func_kwargs = {}
try:
stack = _observation_stack_context.get().copy()
parent = stack[-1] if stack else None
Expand Down Expand Up @@ -392,8 +394,10 @@ def _get_input_from_func_args(
*,
is_method: bool = False,
func_args: Tuple = (),
func_kwargs: Dict = {},
func_kwargs: Optional[Dict] = None,
) -> Any:
if func_kwargs is None:
func_kwargs = {}
# Remove implicitly passed "self" or "cls" argument for instance or class methods
logged_args = func_args[1:] if is_method else func_args
raw_input = {
Expand Down
1 change: 1 addition & 0 deletions langfuse/openai.py
Original file line number Diff line number Diff line change
Expand Up @@ -847,6 +847,7 @@ def initialize(self):

return self._langfuse

@classmethod
def flush(cls):
cls._langfuse.flush()

Expand Down
4 changes: 2 additions & 2 deletions langfuse/parse_error.py
Original file line number Diff line number Diff line change
Expand Up @@ -45,7 +45,7 @@
}


def generate_error_message_fern(error: Error) -> str:
def generate_error_message_fern(error: Exception) -> str:
if isinstance(error, AccessDeniedError):
return errorResponseByCode.get(403, defaultErrorResponse)
elif isinstance(error, MethodNotAllowedError):
Expand All @@ -67,7 +67,7 @@ def generate_error_message_fern(error: Error) -> str:
return defaultErrorResponse


def handle_fern_exception(exception: Error) -> None:
def handle_fern_exception(exception: Exception) -> None:
log = logging.getLogger("langfuse")
log.debug(exception)
error_message = generate_error_message_fern(exception)
Expand Down
5 changes: 4 additions & 1 deletion langfuse/utils/error_logging.py
Original file line number Diff line number Diff line change
Expand Up @@ -18,11 +18,14 @@ def wrapper(*args, **kwargs):
return wrapper


def auto_decorate_methods_with(decorator, exclude: Optional[List[str]] = []):
def auto_decorate_methods_with(decorator, exclude: Optional[List[str]] = None):
"""Class decorator to automatically apply a given decorator to all
methods of a class.
"""

if exclude is None:
exclude = []

def class_decorator(cls):
for attr_name, attr_value in cls.__dict__.items():
if attr_name in exclude:
Expand Down