diff --git a/setup.py b/setup.py index 124b41671..f7c009bbb 100644 --- a/setup.py +++ b/setup.py @@ -49,7 +49,7 @@ extras["pipeline-image"] = ["imagehash>=4.2.1", "pillow>=7.1.2", "timm>=0.4.12"] -extras["pipeline-llm"] = ["litellm>=1.12.0", "llama-cpp-python>=0.2.20"] +extras["pipeline-llm"] = ["litellm>=1.15.8", "llama-cpp-python>=0.2.20"] extras["pipeline-text"] = ["fasttext>=0.9.2", "sentencepiece>=0.1.91"] diff --git a/src/python/txtai/pipeline/llm/litellm.py b/src/python/txtai/pipeline/llm/litellm.py index e79031a84..5709dd9ff 100644 --- a/src/python/txtai/pipeline/llm/litellm.py +++ b/src/python/txtai/pipeline/llm/litellm.py @@ -2,9 +2,6 @@ LiteLLM module """ -import os -import contextlib - # Conditional import try: import litellm as api @@ -35,11 +32,16 @@ def ismodel(path): # pylint: disable=W0702 if isinstance(path, str) and LITELLM: - with open(os.devnull, "w", encoding="utf-8") as f, contextlib.redirect_stdout(f): - try: - return api.get_llm_provider(path) - except: - return False + debug = api.suppress_debug_info + try: + # Suppress debug messages for this test + api.suppress_debug_info = True + return api.get_llm_provider(path) + except: + return False + finally: + # Restore debug info to its original value + api.suppress_debug_info = debug return False