
Fix copilot errors which cause client to hang up during FIM #509

Merged · 1 commit · Jan 8, 2025
25 changes: 21 additions & 4 deletions src/codegate/providers/copilot/provider.py
@@ -659,6 +659,7 @@ def __init__(self, proxy: CopilotProvider):
def connection_made(self, transport: asyncio.Transport) -> None:
"""Handle successful connection to target"""
self.transport = transport
logger.debug(f"Target transport peer: {transport.get_extra_info('peername')}")
self.proxy.target_transport = transport

def _ensure_output_processor(self) -> None:
@@ -703,9 +704,10 @@ async def stream_iterator():
streaming_choices.append(
StreamingChoices(
finish_reason=choice.get("finish_reason", None),
index=0,
index=choice.get("index", 0),
delta=Delta(content=content, role="assistant"),
logprobs=None,
logprobs=choice.get("logprobs", None),
p=choice.get("p", None),
)
)

@@ -716,12 +718,13 @@ async def stream_iterator():
created=record_content.get("created", 0),
model=record_content.get("model", ""),
object="chat.completion.chunk",
stream=True,
)
yield mr

async for record in self.output_pipeline_instance.process_stream(stream_iterator()):
chunk = record.model_dump_json(exclude_none=True, exclude_unset=True)
sse_data = f"data:{chunk}\n\n".encode("utf-8")
sse_data = f"data: {chunk}\n\n".encode("utf-8")
chunk_size = hex(len(sse_data))[2:] + "\r\n"
self._proxy_transport_write(chunk_size.encode())
self._proxy_transport_write(sse_data)
@@ -764,6 +767,10 @@ def _proxy_transport_write(self, data: bytes):
logger.error("Proxy transport not available")
return
self.proxy.transport.write(data)
# print("DEBUG =================================")
# print(data)
# print("DEBUG =================================")
Contributor Author: I kept these and another set of print statements for future debugging.

def data_received(self, data: bytes) -> None:
"""Handle data received from target"""
@@ -781,11 +788,21 @@ def data_received(self, data: bytes) -> None:
if header_end != -1:
self.headers_sent = True
# Send headers first
headers = data[: header_end + 4]
headers = data[: header_end]

# If Transfer-Encoding is not present, add it
if b"Transfer-Encoding:" not in headers:
headers = headers + b"\r\nTransfer-Encoding: chunked"

headers = headers + b"\r\n\r\n"

self._proxy_transport_write(headers)
logger.debug(f"Headers sent: {headers}")

data = data[header_end + 4 :]
# print("DEBUG =================================")
# print(data)
# print("DEBUG =================================")

self._process_chunk(data)
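
For readers following the wire format: the proxy streams each completion record back to the Copilot client as a server-sent event inside an HTTP/1.1 chunked-transfer body, which is why this diff touches both the "data: " prefix and the Transfer-Encoding header. Below is a minimal sketch of that framing; the helper name frame_sse_event and the example payload are illustrative assumptions, not code from this PR.

import json

def frame_sse_event(payload: dict) -> bytes:
    """Frame one record as an SSE event inside a single HTTP/1.1 chunk (sketch)."""
    # SSE line: "data: " prefix (the space is what this PR adds) followed by a
    # blank line to terminate the event.
    sse_data = f"data: {json.dumps(payload)}\n\n".encode("utf-8")
    # Chunked transfer encoding: chunk size in hex, CRLF, chunk data, CRLF.
    # (The PR writes the size line and the payload in separate transport writes.)
    return hex(len(sse_data))[2:].encode("utf-8") + b"\r\n" + sse_data + b"\r\n"

# Hypothetical usage with a single streaming chunk:
print(frame_sse_event({"choices": [{"index": 0, "delta": {"content": "def "}}]}))

Without a Transfer-Encoding: chunked header (which data_received now injects when missing), the client cannot tell how the response body is framed and may wait indefinitely, which is consistent with the hang this PR addresses.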

2 changes: 1 addition & 1 deletion src/codegate/providers/copilot/streaming.py
@@ -42,7 +42,7 @@ def process_chunk(self, chunk: bytes) -> list:
data = json.loads(data_content)
records.append({"type": "data", "content": data})
except json.JSONDecodeError:
print(f"Failed to parse JSON: {data_content}")
logger.debug(f"Failed to parse JSON: {data_content}")

return records
