feat: support for multiple providers #93

Merged · 50 commits · Jul 9, 2024
Changes from 12 commits

Commits
5512e89
chore: wip backup
Robitx Jan 17, 2024
4d06669
chore: wip backup
Robitx Jan 18, 2024
d1eca2e
Merge branch 'main' of github.com:Robitx/gp.nvim into copilot
Robitx Jan 20, 2024
12900cf
chore: formating
Robitx Jan 20, 2024
e9afe8b
feat: working copilot agents
Robitx Jan 20, 2024
0dc0cc8
Merge branch 'main' of github.com:Robitx/gp.nvim into copilot
Robitx Jan 26, 2024
fc0b9cb
chore: wip backup
Robitx Jan 26, 2024
5a1916e
chore: wip backup + lmstudio test
Robitx Jan 31, 2024
34cef5a
chore: fmt
Robitx Jan 31, 2024
1e0d01a
Merge branch 'main' of github.com:Robitx/gp.nvim into copilot
Robitx Feb 3, 2024
51df9ba
chore: wip
Robitx Feb 20, 2024
561b621
Merge branch 'main' of github.com:Robitx/gp.nvim into copilot
Robitx Feb 27, 2024
be8c2b1
fix: toggle GpChatNew popup (resolve #112)
Robitx Feb 28, 2024
79747e3
chore: auto-generate vimdoc
github-actions[bot] Feb 28, 2024
8d2f1af
chore: strip whitespace in buf target resolve
Robitx Feb 28, 2024
d76be3d
fix: handle symlinks in chat checks (issue: #104)
Robitx Feb 28, 2024
bce3808
feat: ollama with openAI endpoint + gemini
Robitx Mar 24, 2024
0aec2ac
chore: fix typo
Robitx Mar 24, 2024
fd30689
feat: show current agent in chat
Robitx Mar 24, 2024
466aca4
chore: copilot bearer refreshing
Robitx Mar 24, 2024
2777776
feat: dynamic model for googleai
Robitx Mar 24, 2024
c130cf2
feat: support for perplexity and anthropic
Robitx Mar 24, 2024
bbd07e6
docs: vim-plug setup snippet (issue: #123)
Robitx Mar 26, 2024
52938ff
chore: auto-generate vimdoc
github-actions[bot] Mar 26, 2024
b372f8a
feat: update defaults to use latest model versions: gpt-4o and gpt-3.…
yiblet May 14, 2024
aaec2ab
chore: auto-generate vimdoc
github-actions[bot] May 28, 2024
33a4d69
feat: filter out empty lines in picker
tanloong Apr 6, 2024
486a061
fix: set the popup window to markdown filetype
XXiaoA Apr 6, 2024
3f9e5dc
chore: wip backup
Robitx Jan 17, 2024
a10524c
chore: wip backup
Robitx Jan 18, 2024
50f6c3c
chore: formating
Robitx Jan 20, 2024
8d990b3
feat: working copilot agents
Robitx Jan 20, 2024
1445703
chore: wip backup
Robitx Jan 26, 2024
7d329cd
chore: wip backup + lmstudio test
Robitx Jan 31, 2024
86d72f0
chore: fmt
Robitx Jan 31, 2024
2a6095d
chore: wip
Robitx Feb 20, 2024
300f4bf
feat: ollama with openAI endpoint + gemini
Robitx Mar 24, 2024
43f3f9f
chore: fix typo
Robitx Mar 24, 2024
55dc4a2
feat: show current agent in chat
Robitx Mar 24, 2024
b6653d3
chore: copilot bearer refreshing
Robitx Mar 24, 2024
9ccccc5
feat: dynamic model for googleai
Robitx Mar 24, 2024
c35c248
feat: support for perplexity and anthropic
Robitx Mar 24, 2024
9a4a226
chore: handle old user chat prefix
Robitx Mar 25, 2024
f99d824
chore: mr fixes
Robitx Jul 9, 2024
12aa0df
chore: newer copilot headers
Robitx Jul 9, 2024
1bdbcdc
chore: GPT4 => GPT4o rename
Robitx Jul 9, 2024
319f899
chore: default sys prompt vars
Robitx Jul 9, 2024
03b1d1e
chore: disable all but openAI before merging to main
Robitx Jul 9, 2024
2bf38ed
chore: better migration message
Robitx Jul 9, 2024
6704bd2
docs: readme section about multi provider support
Robitx Jul 9, 2024
157 changes: 149 additions & 8 deletions lua/gp/config.lua
@@ -17,9 +17,43 @@ local config = {
-- openai_api_key: "sk-...",
-- openai_api_key = os.getenv("env_name.."),
openai_api_key = os.getenv("OPENAI_API_KEY"),
-- api endpoint (you can change this to azure endpoint)
openai_api_endpoint = "https://api.openai.com/v1/chat/completions",
-- openai_api_endpoint = "https://$URL.openai.azure.com/openai/deployments/{{model}}/chat/completions?api-version=2023-03-15-preview",

-- at least one working provider is required
-- to disable a provider set it to empty table like openai = {}
providers = {
-- secrets can be strings or tables with command and arguments
-- secret = { "cat", "path_to/openai_api_key" },
-- secret = { "bw", "get", "password", "OPENAI_API_KEY" },
-- secret : "sk-...",
-- secret = os.getenv("env_name.."),
openai = {
endpoint = "https://api.openai.com/v1/chat/completions",
-- secret = os.getenv("OPENAI_API_KEY"),
},
azure = {
-- endpoint = "https://$URL.openai.azure.com/openai/deployments/{{model}}/chat/completions",
-- secret = os.getenv("AZURE_API_KEY"),
},
copilot = {
endpoint = "https://api.githubcopilot.com/chat/completions",
secret = {
"bash",
"-c",
"cat ~/.config/github-copilot/hosts.json | sed -e 's/.*oauth_token...//;s/\".*//'",
},
},
ollama = {
endpoint = "http://localhost:11434/api/chat",
},
lmstudio = {
endpoint = "http://localhost:1234/v1/chat/completions",
},
googleai = {
endpoint = "https://generativelanguage.googleapis.com/v1beta/models/gemini-pro:streamGenerateContent?key={{secret}}",
secret = os.getenv("GOOGLEAI_API_KEY"),
},
},

-- prefix for all commands
cmd_prefix = "Gp",
-- optional curl parameters (for proxy, etc.)
@@ -40,7 +74,7 @@
chat = true,
command = false,
-- string with model name or table with model name and parameters
model = { model = "gpt-4-1106-preview", temperature = 1.1, top_p = 1 },
model = { model = "gpt-4-turbo-preview", temperature = 1.1, top_p = 1 },
-- system prompt (use this to specify the persona/role of the AI)
system_prompt = "You are a general AI assistant.\n\n"
.. "The user provided the additional info about how they would like you to respond:\n\n"
@@ -53,11 +87,48 @@
.. "- Take a deep breath; You've got this!\n",
},
{
provider = "openai",
name = "ChatGPT3-5",
chat = true,
command = false,
-- string with model name or table with model name and parameters
model = { model = "gpt-3.5-turbo-1106", temperature = 1.1, top_p = 1 },
model = { model = "gpt-3.5-turbo", temperature = 1.1, top_p = 1 },
-- system prompt (use this to specify the persona/role of the AI)
system_prompt = "You are a general AI assistant.\n\n"
.. "The user provided the additional info about how they would like you to respond:\n\n"
.. "- If you're unsure don't guess and say you don't know instead.\n"
.. "- Ask question if you need clarification to provide better answer.\n"
.. "- Think deeply and carefully from first principles step by step.\n"
.. "- Zoom out first to see the big picture and then zoom in to details.\n"
.. "- Use Socratic method to improve your thinking and coding skills.\n"
.. "- Don't elide any code from your output if the answer requires coding.\n"
.. "- Take a deep breath; You've got this!\n",
},
{
provider = "copilot",
name = "ChatCopilot",
chat = true,
command = false,
-- string with model name or table with model name and parameters
model = { model = "gpt-4", temperature = 1.1, top_p = 1 },
-- system prompt (use this to specify the persona/role of the AI)
system_prompt = "You are a general AI assistant.\n\n"
.. "The user provided the additional info about how they would like you to respond:\n\n"
.. "- If you're unsure don't guess and say you don't know instead.\n"
.. "- Ask question if you need clarification to provide better answer.\n"
.. "- Think deeply and carefully from first principles step by step.\n"
.. "- Zoom out first to see the big picture and then zoom in to details.\n"
.. "- Use Socratic method to improve your thinking and coding skills.\n"
.. "- Don't elide any code from your output if the answer requires coding.\n"
.. "- Take a deep breath; You've got this!\n",
},
{
provider = "googleai",
name = "ChatGemini",
chat = true,
command = false,
-- string with model name or table with model name and parameters
model = { model = "gpt-4", temperature = 1.1, top_p = 1 },
-- system prompt (use this to specify the persona/role of the AI)
system_prompt = "You are a general AI assistant.\n\n"
.. "The user provided the additional info about how they would like you to respond:\n\n"
@@ -70,27 +141,92 @@
.. "- Take a deep breath; You've got this!\n",
},
{
provider = "ollama",
name = "ChatOllama",
chat = true,
command = false,
-- string with model name or table with model name and parameters
model = {
model = "mistral:7b-instruct-v0.2-q4_K_M",
temperature = 1.97,
top_p = 1,
num_ctx = 8192,
min_p = 0.05,
},
-- system prompt (use this to specify the persona/role of the AI)
system_prompt = "You are a general AI assistant.",
},
{
provider = "lmsudio",
name = "ChatLMStudio",
chat = true,
command = false,
-- string with model name or table with model name and parameters
model = {
model = "dummy",
temperature = 0.97,
top_p = 1,
num_ctx = 8192,
min_p = 0.05,
},
-- system prompt (use this to specify the persona/role of the AI)
system_prompt = "You are a general AI assistant.",
},
{
provider = "openai",
name = "CodeGPT4",
chat = false,
command = true,
-- string with model name or table with model name and parameters
model = { model = "gpt-4-1106-preview", temperature = 0.8, top_p = 1 },
model = { model = "gpt-4-turbo-preview", temperature = 0.8, top_p = 1 },
-- system prompt (use this to specify the persona/role of the AI)
system_prompt = "You are an AI working as a code editor.\n\n"
.. "Please AVOID COMMENTARY OUTSIDE OF THE SNIPPET RESPONSE.\n"
.. "START AND END YOUR ANSWER WITH:\n\n```",
},
{
provider = "openai",
name = "CodeGPT3-5",
chat = false,
command = true,
-- string with model name or table with model name and parameters
model = { model = "gpt-3.5-turbo-1106", temperature = 0.8, top_p = 1 },
model = { model = "gpt-3.5-turbo", temperature = 0.8, top_p = 1 },
-- system prompt (use this to specify the persona/role of the AI)
system_prompt = "You are an AI working as a code editor.\n\n"
.. "Please AVOID COMMENTARY OUTSIDE OF THE SNIPPET RESPONSE.\n"
.. "START AND END YOUR ANSWER WITH:\n\n```",
},
{
provider = "copilot",
name = "CodeCopilot",
chat = false,
command = true,
-- string with the Copilot engine name or table with engine name and parameters if applicable
model = { model = "gpt-4", temperature = 0.8, top_p = 1, n = 1 },
-- system prompt (use this to specify the persona/role of the AI)
system_prompt = "You are an AI working as a code editor.\n\n"
.. "Please AVOID COMMENTARY OUTSIDE OF THE SNIPPET RESPONSE.\n"
.. "START AND END YOUR ANSWER WITH:\n\n```",
},
{
provider = "ollama",
name = "CodeOllamaDeepSeek",
chat = false,
command = true,
-- string with model name or table with model name and parameters
model = {
model = "mistral:7b-instruct-v0.2-q4_K_M",
temperature = 1.9,
top_p = 1,
num_ctx = 8192,
min_p = 0.05,
},
-- system prompt (use this to specify the persona/role of the AI)
system_prompt = "You are an AI working as a code editor providing answers.\n\n"
.. "Use 4 SPACES FOR INDENTATION.\n"
.. "Please AVOID COMMENTARY OUTSIDE OF THE SNIPPET RESPONSE.\n"
.. "START AND END YOUR ANSWER WITH:\n\n```",
},
},

-- directory for storing chat files
@@ -106,7 +242,6 @@
chat_topic_gen_prompt = "Summarize the topic of our conversation above"
.. " in two or three words. Respond only with those words.",
-- chat topic model (string with model name or table with model name and parameters)
chat_topic_gen_model = "gpt-3.5-turbo-16k",
-- explicitly confirm deletion of a chat file
chat_confirm_delete = true,
-- conceal model parameters in chat
@@ -301,6 +436,12 @@ local config = {
local copy = vim.deepcopy(plugin)
local key = copy.config.openai_api_key
copy.config.openai_api_key = key:sub(1, 3) .. string.rep("*", #key - 6) .. key:sub(-3)
for provider, _ in pairs(copy.providers) do
local s = copy.providers[provider].secret
if s and type(s) == "string" then
copy.providers[provider].secret = s:sub(1, 3) .. string.rep("*", #s - 6) .. s:sub(-3)
end
end
local plugin_info = string.format("Plugin structure:\n%s", vim.inspect(copy))
local params_info = string.format("Command params:\n%s", vim.inspect(params))
local lines = vim.split(plugin_info .. "\n" .. params_info, "\n")
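
For orientation, the new providers table from this diff can be exercised from a user config roughly like the sketch below. This sketch is not part of the PR; the endpoints and the empty-table disable convention are taken from the defaults above, and the secret values are placeholders.

-- Hypothetical user setup (illustrative only, not part of this diff):
require("gp").setup({
	providers = {
		-- keep OpenAI as the primary provider, reading the key from the environment
		openai = {
			endpoint = "https://api.openai.com/v1/chat/completions",
			secret = os.getenv("OPENAI_API_KEY"),
		},
		-- a local Ollama server needs only an endpoint, no secret
		ollama = {
			endpoint = "http://localhost:11434/api/chat",
		},
		-- an empty table disables a provider entirely
		copilot = {},
	},
})
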
1 change: 1 addition & 0 deletions lua/gp/health.lua
@@ -15,6 +15,7 @@ function M.check()
vim.health.error("require('gp').setup() has not been called")
end

--TODO: obsolete
---@diagnostic disable-next-line: undefined-field
local api_key = gp.config.openai_api_key

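
The only change to lua/gp/health.lua in this view is the TODO marking the single openai_api_key check as obsolete. A provider-aware replacement might look roughly like the sketch below; it is not part of this PR, and the field names (gp.config.providers, endpoint) are assumptions based on the config diff above.

-- Hypothetical provider-aware health check (illustrative only):
local found = false
for name, provider in pairs(gp.config.providers or {}) do
	-- treat any provider with a non-empty endpoint as configured
	if type(provider) == "table" and provider.endpoint and provider.endpoint ~= "" then
		vim.health.ok("provider '" .. name .. "' has an endpoint configured")
		found = true
	end
end
if not found then
	vim.health.warn("no provider with an endpoint is configured")
end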