add new plugs
parent 8e2a82e61d
commit 4e100409b1

init.lua
@@ -870,7 +870,7 @@ require('lazy').setup({
   -- This is the easiest way to modularize your config.
   --
   -- Uncomment the following line and add your plugins to `lua/custom/plugins/*.lua` to get going.
-  -- { import = 'custom.plugins' },
+  { import = 'custom.plugins' },
   --
   -- For additional information with loading, sourcing and examples see `:help lazy.nvim-🔌-plugin-spec`
   -- Or use telescope!
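
With `{ import = 'custom.plugins' }` active, lazy.nvim loads every spec module under `lua/custom/plugins/`. As a minimal sketch of what such a module looks like (the path `lua/custom/plugins/example.lua` and the `cmd` trigger are illustrative assumptions, not part of this commit):

-- lua/custom/plugins/example.lua (hypothetical file, shown only to illustrate the import above)
-- Any module in lua/custom/plugins/ that returns a lazy.nvim spec table is picked up automatically.
return {
  'https://github.com/mbbill/undotree', -- plugin source, written the same way as the specs below
  cmd = 'UndotreeToggle', -- assumed lazy-load trigger; omit it to load the plugin at startup
}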

@@ -14,3 +14,7 @@ else
     },
   }
 end
+
+return {
+  'https://github.com/mbbill/undotree',
+}

@@ -18,4 +18,5 @@ return {}
 -- https://github.com/vipul-sharma20/nvim-jira
 -- https://github.com/walterl/centerfold
 -- https://github.com/tricktux/pomodoro.vim/tree/master - but fork it into your own in lua
+-- https://github.com/mbbill/undotree
 --]]

@@ -0,0 +1,78 @@
+if true then
+  return {}
+else
+  return {
+    'gerazov/ollama-chat.nvim',
+    dependencies = {
+      'nvim-lua/plenary.nvim',
+      'stevearc/dressing.nvim',
+      'nvim-telescope/telescope.nvim',
+    },
+    -- lazy load on command
+    cmd = {
+      'OllamaQuickChat',
+      'OllamaCreateNewChat',
+      'OllamaContinueChat',
+      'OllamaChat',
+      'OllamaChatCode',
+      'OllamaModel',
+      'OllamaServe',
+      'OllamaServeStop',
+    },
+
+    keys = {
+      {
+        '<leader>ocq',
+        '<cmd>OllamaQuickChat<cr>',
+        desc = 'Ollama Quick Chat',
+        mode = { 'n', 'x' },
+        silent = true,
+      },
+      {
+        '<leader>ocn',
+        '<cmd>OllamaCreateNewChat<cr>',
+        desc = 'Create Ollama Chat',
+        mode = { 'n', 'x' },
+        silent = true,
+      },
+      {
+        '<leader>occ',
+        '<cmd>OllamaContinueChat<cr>',
+        desc = 'Continue Ollama Chat',
+        mode = { 'n', 'x' },
+        silent = true,
+      },
+      {
+        '<leader>och',
+        '<cmd>OllamaChat<cr>',
+        desc = 'Chat',
+        mode = { 'n' },
+        silent = true,
+      },
+      {
+        '<leader>ocd',
+        '<cmd>OllamaChatCode<cr>',
+        desc = 'Chat Code',
+        mode = { 'n' },
+        silent = true,
+      },
+    },
+
+    opts = {
+      chats_folder = vim.fn.stdpath 'data', -- data folder is ~/.local/share/nvim
+      -- you can also choose "current" and "tmp"
+      quick_chat_file = 'ollama-chat.md',
+      animate_spinner = true, -- set this to false to disable spinner animation
+      model = 'openhermes2-mistral',
+      model_code = 'codellama',
+      url = 'http://127.0.0.1:11434',
+      serve = {
+        on_start = false,
+        command = 'ollama',
+        args = { 'serve' },
+        stop_command = 'pkill',
+        stop_args = { '-SIGTERM', 'ollama' },
+      },
+    },
+  }
+end
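
Both `cmd` and `keys` keep ollama-chat.nvim lazy: the plugin is only loaded the first time one of the Ollama commands or mappings is used, and `opts` is then passed to the plugin's setup by lazy.nvim. Each `keys` entry behaves like an ordinary mapping once the plugin is loaded; as a rough sketch of what the first one amounts to (ignoring the lazy-load trigger that lazy.nvim adds on top):

-- Approximate hand-written equivalent of the '<leader>ocq' entry in `keys` above.
vim.keymap.set({ 'n', 'x' }, '<leader>ocq', '<cmd>OllamaQuickChat<cr>', {
  desc = 'Ollama Quick Chat', -- label shown in :map listings and key hint plugins
  silent = true, -- do not echo the command when the mapping runs
})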

@@ -0,0 +1,7 @@
+if true then
+  return {}
+else
+  return {
+    'https://github.com/mbbill/undotree',
+  }
+end
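
Both new files use the same guard: `if true then return {} else ... end` makes the module return an empty spec, so the plugins stay disabled until the guard is removed. Enabling the undotree spec would amount to dropping that guard, roughly like this (a sketch, not part of this commit):

-- Enabled version of the undotree spec, with the if/else guard removed so
-- lazy.nvim actually installs and loads the plugin.
return {
  'https://github.com/mbbill/undotree',
}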