
Commit

fix lint
yrobla committed Jan 20, 2025
1 parent 3df8971 commit 96e20ac
Showing 7 changed files with 16 additions and 12 deletions.
2 changes: 1 addition & 1 deletion src/codegate/config.py
@@ -20,7 +20,7 @@
     "anthropic": "https://api.anthropic.com/v1",
     "vllm": "http://localhost:8000",  # Base URL without /v1 path
     "ollama": "http://localhost:11434",  # Default Ollama server URL
-    "lm_studio": "http://localhost:1234"
+    "lm_studio": "http://localhost:1234",
 }


6 changes: 4 additions & 2 deletions src/codegate/pipeline/base.py
@@ -321,7 +321,8 @@ async def process(
 
 class InputPipelineInstance:
     def __init__(
-        self, pipeline_steps: List[PipelineStep], secret_manager: SecretsManager, is_fim: bool):
+        self, pipeline_steps: List[PipelineStep], secret_manager: SecretsManager, is_fim: bool
+    ):
         self.pipeline_steps = pipeline_steps
         self.secret_manager = secret_manager
         self.is_fim = is_fim
@@ -384,7 +385,8 @@ async def process_request(
 
 class SequentialPipelineProcessor:
     def __init__(
-        self, pipeline_steps: List[PipelineStep], secret_manager: SecretsManager, is_fim: bool):
+        self, pipeline_steps: List[PipelineStep], secret_manager: SecretsManager, is_fim: bool
+    ):
         self.pipeline_steps = pipeline_steps
         self.secret_manager = secret_manager
         self.is_fim = is_fim
8 changes: 5 additions & 3 deletions src/codegate/pipeline/codegate_context_retriever/codegate.py
@@ -95,10 +95,12 @@ async def process(
         # in the rest of the user query/messsages
         user_messages = re.sub(r"```.*?```", "", user_message, flags=re.DOTALL)
         user_messages = re.sub(r"⋮...*?⋮...\n\n", "", user_messages, flags=re.DOTALL)
-        user_messages = re.sub(r"<environment_details>.*?</environment_details>", "", user_messages, flags=re.DOTALL)
+        user_messages = re.sub(
+            r"<environment_details>.*?</environment_details>", "", user_messages, flags=re.DOTALL
+        )
 
         # split messages into double newlines, to avoid passing so many content in the search
-        split_messages = re.split(r'</?task>|(\n\n)', user_messages)
+        split_messages = re.split(r"</?task>|(\n\n)", user_messages)
         collected_bad_packages = []
         for item_message in split_messages:
             # Vector search to find bad packages
@@ -143,7 +145,7 @@ async def process(
                 # Combine the updated task block with the rest of the message
                 context_msg = updated_task_content + rest_of_message
             else:
-                context_msg = f'Context: {context_str} \n\n Query: {message_str}'  # type: ignore
+                context_msg = f"Context: {context_str} \n\n Query: {message_str}"  # type: ignore
             message["content"] = context_msg
 
             logger.debug("Final context message", context_message=context_msg)
3 changes: 2 additions & 1 deletion src/codegate/pipeline/secrets/secrets.py
@@ -452,7 +452,8 @@ async def process_chunk(
             return [chunk]
 
         is_cline_client = any(
-            "Cline" in str(message.trigger_string or "") for message in input_context.alerts_raised or []
+            "Cline" in str(message.trigger_string or "")
+            for message in input_context.alerts_raised or []
         )
 
         # Check if this is the first chunk (delta role will be present, others will not)
3 changes: 2 additions & 1 deletion src/codegate/providers/base.py
@@ -199,7 +199,8 @@ async def _cleanup_after_streaming(
         context.sensitive.secure_cleanup()
 
     async def complete(
-        self, data: Dict, api_key: Optional[str], is_fim_request: bool) -> Union[ModelResponse, AsyncIterator[ModelResponse]]:
+        self, data: Dict, api_key: Optional[str], is_fim_request: bool
+    ) -> Union[ModelResponse, AsyncIterator[ModelResponse]]:
         """
         Main completion flow with pipeline integration
2 changes: 1 addition & 1 deletion src/codegate/providers/openai/provider.py
@@ -56,7 +56,7 @@ async def create_completion(
 
         # if model starts with lm_studio, propagate it
        if data.get("model", "").startswith("lm_studio"):
-            data["base_url"] = self.lm_studio_url+"/v1/"
+            data["base_url"] = self.lm_studio_url + "/v1/"
         is_fim_request = self._is_fim_request(request, data)
         try:
             stream = await self.complete(data, api_key, is_fim_request=is_fim_request)
4 changes: 1 addition & 3 deletions src/codegate/storage/storage_engine.py
@@ -53,9 +53,7 @@ def __init__(self, data_path="./sqlite_data"):
         self.inference_engine = LlamaCppInferenceEngine()
         conf = Config.get_config()
         if conf and conf.model_base_path and conf.embedding_model:
-            self.model_path = (
-                f"{conf.model_base_path}/{conf.embedding_model}"
-            )
+            self.model_path = f"{conf.model_base_path}/{conf.embedding_model}"
         else:
             self.model_path = ""

