This repository was archived by the owner on Jun 5, 2025. It is now read-only.

Commit 81fc260

Merge pull request #543 from stacklok/issue-440
feat: add support for aider
2 parents b49a985 + a22ea5f commit 81fc260

7 files changed: +52 -8 lines

.gitignore

Lines changed: 1 addition & 0 deletions
@@ -49,3 +49,4 @@ sqlite_data/vectordb.db
 
 # certificate directory
 *certs/
+.aider*

README.md

Lines changed: 9 additions & 0 deletions
@@ -67,6 +67,14 @@ With Continue, you can choose from several leading AI model providers:
 
   🔮 Many more on the way!
 
+- **[Aider](https://aider.chat)**
+
+  With Aider, you can choose from two leading AI model providers:
+
+  - 💻 Local LLMs with [Ollama](https://ollama.com/)
+  - 🧠 [OpenAI API](https://openai.com/api/)
+
+
 ### Privacy first
 
 Unlike E.T., your code never phones home! 🛸 CodeGate is designed with privacy
@@ -84,6 +92,7 @@ Check out the quickstart guides to get up and running quickly!
 - [Quickstart guide for GitHub Copilot with VS Code](https://docs.codegate.ai/quickstart)
 - [Quickstart guide for Continue with VS Code and Ollama](https://docs.codegate.ai/quickstart-continue)
 
+
 ## 🎯 Usage
 
 ### IDE integration

src/codegate/pipeline/base.py

Lines changed: 5 additions & 3 deletions
@@ -253,9 +253,11 @@ def get_latest_user_messages(request: ChatCompletionRequest) -> str:
 
         for message in reversed(request.get("messages", [])):
             if message["role"] == "user":
-                latest_user_messages += "\n" + message["content"]
-            else:
-                break
+                # if found we can stop here, if not we continue until we find it
+                message_str = message.get("content", "")
+                if message_str:
+                    latest_user_messages += "\n" + str(message_str)
+                    break
 
         return latest_user_messages
 

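The change above makes get_latest_user_messages skip user messages with empty content, stringify whatever content it finds (clients such as Aider may send non-string payloads), and stop at the most recent non-empty user message. A minimal standalone sketch of the new behavior, using a plain dict in place of the real ChatCompletionRequest (sample data is illustrative):

def get_latest_user_messages(request: dict) -> str:
    latest_user_messages = ""
    # Walk the conversation from newest to oldest and keep only the latest
    # non-empty user message; str() guards against non-string content.
    for message in reversed(request.get("messages", [])):
        if message["role"] == "user":
            message_str = message.get("content", "")
            if message_str:
                latest_user_messages += "\n" + str(message_str)
                break
    return latest_user_messages

request = {
    "messages": [
        {"role": "user", "content": "first question"},
        {"role": "assistant", "content": "an answer"},
        {"role": "user", "content": ""},             # empty content is skipped now
        {"role": "user", "content": ["a", "list"]},  # non-string content is stringified
    ]
}
print(get_latest_user_messages(request))  # "\n['a', 'list']"
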
src/codegate/pipeline/codegate_context_retriever/codegate.py

Lines changed: 0 additions & 1 deletion
@@ -59,7 +59,6 @@ async def process(
         """
         Use RAG DB to add context to the user request
         """
-
         # Get the latest user messages
         user_messages = self.get_latest_user_messages(request)

src/codegate/pipeline/output.py

Lines changed: 2 additions & 0 deletions
@@ -27,6 +27,8 @@ class OutputPipelineContext:
     snippets: List[CodeSnippet] = field(default_factory=list)
     # Store all content that has been processed by the pipeline
     processed_content: List[str] = field(default_factory=list)
+    # partial buffer to store prefixes
+    prefix_buffer: str = ""
 
 
 class OutputPipelineStep(ABC):

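The new prefix_buffer field gives output pipeline steps a place, on the shared per-stream context, to hold text that might be the start of a redaction marker until the next chunk arrives. A minimal sketch of the field in isolation (the other fields of the real class, including snippets and CodeSnippet, are omitted for brevity):

from dataclasses import dataclass, field
from typing import List

@dataclass
class OutputPipelineContext:
    # Store all content that has been processed by the pipeline
    processed_content: List[str] = field(default_factory=list)
    # partial buffer to store prefixes
    prefix_buffer: str = ""

Because the same context instance is passed to every process_chunk call for a stream, a plain string field is enough to carry a suspected marker prefix across chunk boundaries.
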
src/codegate/pipeline/secrets/secrets.py

Lines changed: 9 additions & 4 deletions
@@ -280,7 +280,7 @@ async def process(
             if "content" in message and message["content"]:
                 # Protect the text
                 protected_string, redacted_count = self._redact_text(
-                    message["content"], secrets_manager, session_id, context
+                    str(message["content"]), secrets_manager, session_id, context
                 )
                 new_request["messages"][i]["content"] = protected_string
 
@@ -389,12 +389,17 @@ async def process_chunk(
             return [chunk]
 
         # If we have a partial marker at the end, keep buffering
-        if self.marker_start in buffered_content or self._is_partial_marker_prefix(
-            buffered_content
-        ):
+        if self.marker_start in buffered_content:
+            context.prefix_buffer = ""
+            return []
+
+        if self._is_partial_marker_prefix(buffered_content):
+            context.prefix_buffer += buffered_content
             return []
 
         # No markers or partial markers, let pipeline handle the chunk normally
+        chunk.choices[0].delta.content = context.prefix_buffer + chunk.choices[0].delta.content
+        context.prefix_buffer = ""
         return [chunk]
 
 

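The reworked process_chunk splits the old combined condition into three cases: a complete marker in the buffered text clears the prefix buffer and keeps buffering inside the step, a partial marker prefix at the end of a chunk is stashed in context.prefix_buffer, and plain text is emitted with any stashed prefix prepended so nothing is silently dropped. A standalone simulation of that decision flow (the marker value, helper, and simplified context are illustrative assumptions, not the project's actual classes):

from dataclasses import dataclass

MARKER_START = "REDACTED<"

def is_partial_marker_prefix(text: str) -> bool:
    # True when the text ends with a proper prefix of the marker, e.g. "...REDAC"
    return any(text.endswith(MARKER_START[:i]) for i in range(1, len(MARKER_START)))

@dataclass
class Ctx:
    prefix_buffer: str = ""

def process_chunk(content: str, ctx: Ctx) -> list[str]:
    buffered = ctx.prefix_buffer + content
    if MARKER_START in buffered:
        # Full marker present: reset the prefix and keep buffering for unredaction.
        ctx.prefix_buffer = ""
        return []
    if is_partial_marker_prefix(buffered):
        # Could be the start of a marker split across chunks: hold it back.
        ctx.prefix_buffer = buffered
        return []
    # Plain text: flush whatever was held back together with this chunk.
    ctx.prefix_buffer = ""
    return [buffered]

ctx = Ctx()
print(process_chunk("hello RED", ctx))   # [] -> "hello RED" held as a possible prefix
print(process_chunk("DISH text", ctx))   # ['hello REDDISH text'] -> prefix flushed, nothing lost
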
src/codegate/providers/ollama/provider.py

Lines changed: 26 additions & 0 deletions
@@ -45,6 +45,32 @@ def _setup_routes(self):
         """
         Sets up Ollama API routes.
         """
+        @self.router.get(f"/{self.provider_route_name}/api/tags")
+        async def get_tags(request: Request):
+            """
+            Special route for /api/tags that responds outside of the pipeline.
+            Tags are used to get the list of models.
+            https://github.com/ollama/ollama/blob/main/docs/api.md#list-local-models
+            """
+            async with httpx.AsyncClient() as client:
+                response = await client.get(f"{self.base_url}/api/tags")
+            return response.json()
+
+        @self.router.post(f"/{self.provider_route_name}/api/show")
+        async def show_model(request: Request):
+            """
+            Route for /api/show that responds outside of the pipeline.
+            /api/show is used to get the model information.
+            https://github.com/ollama/ollama/blob/main/docs/api.md#show-model-information
+            """
+            body = await request.body()
+            async with httpx.AsyncClient() as client:
+                response = await client.post(
+                    f"{self.base_url}/api/show",
+                    content=body,
+                    headers={"Content-Type": "application/json"},
+                )
+            return response.json()
 
         # Native Ollama API routes
         @self.router.post(f"/{self.provider_route_name}/api/chat")

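The two new routes answer /api/tags and /api/show directly against the upstream Ollama server instead of sending them through the completion pipeline, which is what lets Aider and other Ollama clients list and inspect models through CodeGate. A minimal FastAPI sketch of the same passthrough pattern (the route prefix, base URL, and app wiring here are illustrative assumptions, not the project's actual provider setup):

import httpx
from fastapi import APIRouter, FastAPI, Request

OLLAMA_BASE_URL = "http://localhost:11434"  # assumed upstream Ollama address

router = APIRouter()

@router.get("/ollama/api/tags")
async def get_tags(request: Request):
    # Forward the model-list request to Ollama and relay its JSON response.
    async with httpx.AsyncClient() as client:
        response = await client.get(f"{OLLAMA_BASE_URL}/api/tags")
    return response.json()

@router.post("/ollama/api/show")
async def show_model(request: Request):
    # Relay the request body unchanged so Ollama sees the same payload the client sent.
    body = await request.body()
    async with httpx.AsyncClient() as client:
        response = await client.post(
            f"{OLLAMA_BASE_URL}/api/show",
            content=body,
            headers={"Content-Type": "application/json"},
        )
    return response.json()

app = FastAPI()
app.include_router(router)

With this in place, a client could query GET /ollama/api/tags on the proxy and receive the upstream model list unchanged.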