Skip to content

Commit 1ab471b

Browse files
committed
Add support for copilot extension agents
https://docs.github.com/en/copilot/building-copilot-extensions/about-building-copilot-extensions - Change @buffer and @buffers to #buffer and #buffers - Add support for @agent agent selection - Add support for config.agent for specifying default agent - Add :CopilotChatAgents for listing agents (and showing selected agent) - Remove :CopilotChatModel, instead show which model is selected in :CopilotChatModels Closes #466 Signed-off-by: Tomas Slusny <slusnucky@gmail.com>
1 parent 76bfba5 commit 1ab471b

File tree

5 files changed

+242
-90
lines changed

5 files changed

+242
-90
lines changed

README.md

Lines changed: 4 additions & 3 deletions
Original file line numberDiff line numberDiff line change
@@ -110,7 +110,7 @@ Verify "[Copilot chat in the IDE](https://github.com/settings/copilot)" is enabl
110110
- `:CopilotChatLoad <name>?` - Load chat history from file
111111
- `:CopilotChatDebugInfo` - Show debug information
112112
- `:CopilotChatModels` - View and select available models. This is reset when a new instance is made. Please set your model in `init.lua` for persistence.
113-
- `:CopilotChatModel` - View the currently selected model.
113+
- `:CopilotChatAgents` - View and select available agents. This is reset when a new instance is made. Please set your agent in `init.lua` for persistence.
114114

115115
#### Commands coming from default prompts
116116

@@ -202,7 +202,8 @@ Also see [here](/lua/CopilotChat/config.lua):
202202
allow_insecure = false, -- Allow insecure server connections
203203

204204
system_prompt = prompts.COPILOT_INSTRUCTIONS, -- System prompt to use
205-
model = 'gpt-4o', -- GPT model to use, see ':CopilotChatModels' for available models
205+
model = 'gpt-4o', -- Default model to use, see ':CopilotChatModels' for available models
206+
agent = 'copilot', -- Default agent to use, see ':CopilotChatAgents' for available agents (can be specified manually in prompt via @).
206207
temperature = 0.1, -- GPT temperature
207208

208209
question_header = '## User ', -- Header to use for user questions
@@ -218,7 +219,7 @@ Also see [here](/lua/CopilotChat/config.lua):
218219
clear_chat_on_new_prompt = false, -- Clears chat on every new prompt
219220
highlight_selection = true, -- Highlight selection in the source buffer when in the chat window
220221

221-
context = nil, -- Default context to use, 'buffers', 'buffer' or none (can be specified manually in prompt via @).
222+
context = nil, -- Default context to use, 'buffers', 'buffer' or none (can be specified manually in prompt via #).
222223
history_path = vim.fn.stdpath('data') .. '/copilotchat_history', -- Default path to stored history
223224
callback = nil, -- Callback to use when ask response is received
224225

lua/CopilotChat/config.lua

Lines changed: 4 additions & 2 deletions
Original file line numberDiff line numberDiff line change
@@ -69,6 +69,7 @@ local select = require('CopilotChat.select')
6969
---@field allow_insecure boolean?
7070
---@field system_prompt string?
7171
---@field model string?
72+
---@field agent string?
7273
---@field temperature number?
7374
---@field question_header string?
7475
---@field answer_header string?
@@ -94,7 +95,8 @@ return {
9495
allow_insecure = false, -- Allow insecure server connections
9596

9697
system_prompt = prompts.COPILOT_INSTRUCTIONS, -- System prompt to use
97-
model = 'gpt-4o', -- GPT model to use, see ':CopilotChatModels' for available models
98+
model = 'gpt-4o', -- Default model to use, see ':CopilotChatModels' for available models
99+
agent = 'copilot', -- Default agent to use, see ':CopilotChatAgents' for available agents (can be specified manually in prompt via @).
98100
temperature = 0.1, -- GPT temperature
99101

100102
question_header = '## User ', -- Header to use for user questions
@@ -110,7 +112,7 @@ return {
110112
clear_chat_on_new_prompt = false, -- Clears chat on every new prompt
111113
highlight_selection = true, -- Highlight selection
112114

113-
context = nil, -- Default context to use, 'buffers', 'buffer' or none (can be specified manually in prompt via @).
115+
context = nil, -- Default context to use, 'buffers', 'buffer' or none (can be specified manually in prompt via #).
114116
history_path = vim.fn.stdpath('data') .. '/copilotchat_history', -- Default path to stored history
115117
callback = nil, -- Callback to use when ask response is received
116118

lua/CopilotChat/copilot.lua

Lines changed: 81 additions & 3 deletions
Original file line numberDiff line numberDiff line change
@@ -13,6 +13,7 @@
1313
---@field end_row number?
1414
---@field system_prompt string?
1515
---@field model string?
16+
---@field agent string?
1617
---@field temperature number?
1718
---@field on_progress nil|fun(response: string):nil
1819

@@ -29,6 +30,7 @@
2930
---@field load fun(self: CopilotChat.Copilot, name: string, path: string):table
3031
---@field running fun(self: CopilotChat.Copilot):boolean
3132
---@field list_models fun(self: CopilotChat.Copilot):table
33+
---@field list_agents fun(self: CopilotChat.Copilot):table
3234

3335
local async = require('plenary.async')
3436
local log = require('plenary.log')
@@ -340,6 +342,7 @@ local Copilot = class(function(self, proxy, allow_insecure)
340342
self.sessionid = nil
341343
self.machineid = machine_id()
342344
self.models = nil
345+
self.agents = nil
343346
self.claude_enabled = false
344347
self.current_job = nil
345348
self.request_args = {
@@ -461,6 +464,39 @@ function Copilot:fetch_models()
461464
return out
462465
end
463466

467+
function Copilot:fetch_agents()
468+
if self.agents then
469+
return self.agents
470+
end
471+
472+
local response, err = curl_get(
473+
'https://api.githubcopilot.com/agents',
474+
vim.tbl_extend('force', self.request_args, {
475+
headers = self:authenticate(),
476+
})
477+
)
478+
479+
if err then
480+
error(err)
481+
end
482+
483+
if response.status ~= 200 then
484+
error('Failed to fetch agents: ' .. tostring(response.status))
485+
end
486+
487+
local agents = vim.json.decode(response.body)['agents']
488+
local out = {}
489+
for _, agent in ipairs(agents) do
490+
out[agent['slug']] = agent
491+
end
492+
493+
out['copilot'] = { name = 'Copilot', default = true }
494+
495+
log.info('Agents fetched')
496+
self.agents = out
497+
return out
498+
end
499+
464500
function Copilot:enable_claude()
465501
if self.claude_enabled then
466502
return true
@@ -510,6 +546,7 @@ function Copilot:ask(prompt, opts)
510546
local selection = opts.selection or {}
511547
local system_prompt = opts.system_prompt or prompts.COPILOT_INSTRUCTIONS
512548
local model = opts.model or 'gpt-4o-2024-05-13'
549+
local agent = opts.agent or 'copilot'
513550
local temperature = opts.temperature or 0.1
514551
local on_progress = opts.on_progress
515552
local job_id = uuid()
@@ -522,9 +559,12 @@ function Copilot:ask(prompt, opts)
522559
log.debug('Filename: ' .. filename)
523560
log.debug('Filetype: ' .. filetype)
524561
log.debug('Model: ' .. model)
562+
log.debug('Agent: ' .. agent)
525563
log.debug('Temperature: ' .. temperature)
526564

527565
local models = self:fetch_models()
566+
local agents = self:fetch_agents()
567+
local agent_config = agents[agent]
528568
local capabilities = models[model] and models[model].capabilities
529569
local max_tokens = capabilities.limits.max_prompt_tokens -- FIXME: Is max_prompt_tokens the right limit?
530570
local max_output_tokens = capabilities.limits.max_output_tokens
@@ -582,6 +622,7 @@ function Copilot:ask(prompt, opts)
582622
local errored = false
583623
local finished = false
584624
local full_response = ''
625+
local full_references = ''
585626

586627
local function finish_stream(err, job)
587628
if err then
@@ -631,6 +672,22 @@ function Copilot:ask(prompt, opts)
631672
return
632673
end
633674

675+
if content.copilot_references then
676+
for _, reference in ipairs(content.copilot_references) do
677+
local metadata = reference.metadata
678+
if metadata and metadata.display_name and metadata.display_url then
679+
full_references = full_references
680+
.. '\n'
681+
.. '['
682+
.. metadata.display_name
683+
.. ']'
684+
.. '('
685+
.. metadata.display_url
686+
.. ')'
687+
end
688+
end
689+
end
690+
634691
if not content.choices or #content.choices == 0 then
635692
return
636693
end
@@ -668,8 +725,13 @@ function Copilot:ask(prompt, opts)
668725
self:enable_claude()
669726
end
670727

728+
local url = 'https://api.githubcopilot.com/chat/completions'
729+
if not agent_config.default then
730+
url = 'https://api.githubcopilot.com/agents/' .. agent .. '?chat'
731+
end
732+
671733
local response, err = curl_post(
672-
'https://api.githubcopilot.com/chat/completions',
734+
url,
673735
vim.tbl_extend('force', self.request_args, {
674736
headers = self:authenticate(),
675737
body = temp_file(body),
@@ -708,6 +770,14 @@ function Copilot:ask(prompt, opts)
708770
return
709771
end
710772

773+
if full_references ~= '' then
774+
full_references = '\n\n**`References:`**' .. full_references
775+
full_response = full_response .. full_references
776+
if on_progress then
777+
on_progress(full_references)
778+
end
779+
end
780+
711781
log.trace('Full response: ' .. full_response)
712782
log.debug('Last message: ' .. vim.inspect(last_message))
713783

@@ -727,10 +797,10 @@ function Copilot:ask(prompt, opts)
727797
end
728798

729799
--- List available models
800+
---@return table
730801
function Copilot:list_models()
731802
local models = self:fetch_models()
732803

733-
-- Group models by version and shortest ID
734804
local version_map = {}
735805
for id, model in pairs(models) do
736806
local version = model.version
@@ -739,10 +809,18 @@ function Copilot:list_models()
739809
end
740810
end
741811

742-
-- Map to IDs and sort
743812
local result = vim.tbl_values(version_map)
744813
table.sort(result)
814+
return result
815+
end
816+
817+
--- List available agents
818+
---@return table
819+
function Copilot:list_agents()
820+
local agents = self:fetch_agents()
745821

822+
local result = vim.tbl_keys(agents)
823+
table.sort(result)
746824
return result
747825
end
748826

0 commit comments

Comments
 (0)