From bb879164df68a25b9cd50892559c4a6468755c5e Mon Sep 17 00:00:00 2001
From: Tomas Slusny
Date: Tue, 21 Oct 2025 23:12:21 +0200
Subject: [PATCH] refactor: move list_prompts to prompts module completely

- Renamed `prompt.lua` to `prompts.lua` for clarity and consistency.
- Moved prompt listing logic from `init.lua` to `prompts.lua`.
- Updated all references to use the new `prompts` module.
- Removed unused model and prompt listing functions from `init.lua`.
---
 lua/CopilotChat/init.lua                    | 62 +++++++--------------
 lua/CopilotChat/{prompt.lua => prompts.lua} | 61 +++++++-------------
 2 files changed, 38 insertions(+), 85 deletions(-)
 rename lua/CopilotChat/{prompt.lua => prompts.lua} (94%)

diff --git a/lua/CopilotChat/init.lua b/lua/CopilotChat/init.lua
index c738d4a8..0eb6f9de 100644
--- a/lua/CopilotChat/init.lua
+++ b/lua/CopilotChat/init.lua
@@ -2,7 +2,7 @@ local async = require('plenary.async')
 local log = require('plenary.log')
 local client = require('CopilotChat.client')
 local constants = require('CopilotChat.constants')
-local prompts = require('CopilotChat.prompt')
+local prompts = require('CopilotChat.prompts')
 local select = require('CopilotChat.select')
 local utils = require('CopilotChat.utils')
 local curl = require('CopilotChat.utils.curl')
@@ -145,45 +145,6 @@ local function store_sticky(prompt)
   state.sticky = sticky
 end
 
---- List available models.
---- @return CopilotChat.client.Model[]
-local function list_models()
-  local models = client:models()
-  local result = vim.tbl_keys(models)
-
-  table.sort(result, function(a, b)
-    a = models[a]
-    b = models[b]
-    if a.provider ~= b.provider then
-      return a.provider < b.provider
-    end
-    return a.id < b.id
-  end)
-
-  return vim.tbl_map(function(id)
-    return models[id]
-  end, result)
-end
-
---- List available prompts.
----@return table
-local function list_prompts()
-  local prompts_to_use = {}
-
-  for name, prompt in pairs(M.config.prompts) do
-    local val = prompt
-    if type(prompt) == 'string' then
-      val = {
-        prompt = prompt,
-      }
-    end
-
-    prompts_to_use[name] = val
-  end
-
-  return prompts_to_use
-end
-
 --- Finish writing to chat buffer.
 ---@param start_of_chat boolean?
 local function finish(start_of_chat)
@@ -413,7 +374,22 @@ end
 --- Select default Copilot GPT model.
 function M.select_model()
   async.run(function()
-    local models = list_models()
+    local models = client:models()
+    local result = vim.tbl_keys(models)
+
+    table.sort(result, function(a, b)
+      a = models[a]
+      b = models[b]
+      if a.provider ~= b.provider then
+        return a.provider < b.provider
+      end
+      return a.id < b.id
+    end)
+
+    models = vim.tbl_map(function(id)
+      return models[id]
+    end, result)
+
     local choices = vim.tbl_map(function(model)
       return {
         id = model.id,
@@ -467,7 +443,7 @@ end
 --- Select a prompt template to use.
 ---@param config CopilotChat.config.Shared?
 function M.select_prompt(config)
-  local prompts = list_prompts()
+  local prompts = prompts.list_prompts()
   local keys = vim.tbl_keys(prompts)
   table.sort(keys)
 
@@ -859,7 +835,7 @@ function M.setup(config)
     end)
   end
 
-  for name, prompt in pairs(list_prompts()) do
+  for name, prompt in pairs(prompts.list_prompts()) do
     if prompt.prompt then
       vim.api.nvim_create_user_command('CopilotChat' .. name, function(args)
         local input = prompt.prompt
diff --git a/lua/CopilotChat/prompt.lua b/lua/CopilotChat/prompts.lua
similarity index 94%
rename from lua/CopilotChat/prompt.lua
rename to lua/CopilotChat/prompts.lua
index b35da0f2..c67c4ee7 100644
--- a/lua/CopilotChat/prompt.lua
+++ b/lua/CopilotChat/prompts.lua
@@ -10,29 +10,27 @@ local WORD_NO_INPUT = '([^%s]+)'
 local WORD_WITH_INPUT_QUOTED = WORD .. ':`([^`]+)`'
 local WORD_WITH_INPUT_UNQUOTED = WORD .. ':?([^%s`]*)'
 
---- List available models.
---- @return CopilotChat.client.Model[]
-local function list_models()
-  local models = client:models()
-  local result = vim.tbl_keys(models)
-
-  table.sort(result, function(a, b)
-    a = models[a]
-    b = models[b]
-    if a.provider ~= b.provider then
-      return a.provider < b.provider
-    end
-    return a.id < b.id
-  end)
-
-  return vim.tbl_map(function(id)
-    return models[id]
-  end, result)
+--- Find custom instructions in the current working directory.
+---@param cwd string
+---@return table
+local function find_custom_instructions(cwd)
+  local out = {}
+  local copilot_instructions_path = vim.fs.joinpath(cwd, '.github', 'copilot-instructions.md')
+  local copilot_instructions = files.read_file(copilot_instructions_path)
+  if copilot_instructions then
+    table.insert(out, {
+      filename = copilot_instructions_path,
+      content = vim.trim(copilot_instructions),
+    })
+  end
+  return out
 end
 
+local M = {}
+
 --- List available prompts.
 ---@return table
-local function list_prompts()
+function M.list_prompts()
   local config = require('CopilotChat.config')
   local prompts_to_use = {}
 
@@ -50,24 +48,6 @@
   return prompts_to_use
 end
 
---- Find custom instructions in the current working directory.
----@param cwd string
----@return table
-local function find_custom_instructions(cwd)
-  local out = {}
-  local copilot_instructions_path = vim.fs.joinpath(cwd, '.github', 'copilot-instructions.md')
-  local copilot_instructions = files.read_file(copilot_instructions_path)
-  if copilot_instructions then
-    table.insert(out, {
-      filename = copilot_instructions_path,
-      content = vim.trim(copilot_instructions),
-    })
-  end
-  return out
-end
-
-local M = {}
-
 --- Resolve enabled tools from the prompt.
 ---@param prompt string?
 ---@param config CopilotChat.config.Shared?
@@ -299,7 +279,7 @@ function M.resolve_prompt(prompt, config)
     end
   end
 
-  local prompts_to_use = list_prompts()
+  local prompts_to_use = M.list_prompts()
   local depth = 0
   local MAX_DEPTH = 10
 
@@ -370,10 +350,7 @@ end
 ---@async
 function M.resolve_model(prompt, config)
   config, prompt = M.resolve_prompt(prompt, config)
-
-  local models = vim.tbl_map(function(model)
-    return model.id
-  end, list_models())
+  local models = vim.tbl_keys(client:models())
  local selected_model = config.model or ''
 
  prompt = prompt:gsub('%$' .. WORD, function(match)
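
A minimal usage sketch of the relocated API (not part of the patch itself): it assumes CopilotChat is installed and that prompts.lua still returns its M table; the require path and list_prompts() call come from the diff above, while the loop body and variable names are illustrative only.

  -- The helper that was file-local to init.lua is now exposed on the prompts module:
  local prompts = require('CopilotChat.prompts')

  -- list_prompts() returns a table keyed by prompt name; as in the removed init.lua
  -- helper, plain string prompts are normalized into { prompt = ... } tables.
  for name, prompt in pairs(prompts.list_prompts()) do
    print(name, prompt.prompt)
  end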