From 793143c6d2810fd1a57d93b102eb8e18a7374a34 Mon Sep 17 00:00:00 2001
From: "Gustavo Silva (es00679121_prosegur)"
Date: Sun, 21 Jan 2024 20:35:43 +0100
Subject: [PATCH] add chatgpt, norg and gen plugins

---
 after/plugin/settings.lua              |   3 +
 init.lua                               |   4 +-
 lazy-lock.json                         |   3 +
 lua/custom/plugins/chatgpt.lua         | 185 +++++++++++++++++++++++++
 lua/custom/plugins/mardown_preview.lua |  30 +++-
 lua/custom/plugins/norg.lua            |  35 +++++
 lua/custom/plugins/ollama.lua          |  39 ++++++
 7 files changed, 291 insertions(+), 8 deletions(-)
 create mode 100644 lua/custom/plugins/chatgpt.lua
 create mode 100644 lua/custom/plugins/norg.lua
 create mode 100644 lua/custom/plugins/ollama.lua

diff --git a/after/plugin/settings.lua b/after/plugin/settings.lua
index 7cad66ae..e3918132 100644
--- a/after/plugin/settings.lua
+++ b/after/plugin/settings.lua
@@ -58,3 +58,6 @@ opt.foldmethod = 'indent'
 opt.foldenable = false
 opt.foldlevel = 99
 global.markdown_folding = 1
+
+global.mkdp_markdown_css = '~/markdown.css'
+global.mkdp_highlight_css = ''
diff --git a/init.lua b/init.lua
index 8c196ac6..39dfaca2 100644
--- a/init.lua
+++ b/init.lua
@@ -690,11 +690,11 @@ local servers = {
     }
   },
   grammarly = {
-    filetypes = { 'markdown' },
+    filetypes = { 'markdown', 'norg' },
   },
+  tsserver = {},
   -- pyright = {},
   -- rust_analyzer = {},
-  -- tsserver = {},
   -- html = { filetypes = { 'html', 'twig', 'hbs'} },
 
   lua_ls = {
diff --git a/lazy-lock.json b/lazy-lock.json
index db947467..7cad8351 100644
--- a/lazy-lock.json
+++ b/lazy-lock.json
@@ -1,4 +1,5 @@
 {
+  "ChatGPT.nvim": { "branch": "main", "commit": "48c59167beeb6ee0caa501c46cecc97b9be8571d" },
   "Comment.nvim": { "branch": "master", "commit": "0236521ea582747b58869cb72f70ccfa967d2e89" },
   "LuaSnip": { "branch": "master", "commit": "80a8528f084a97b624ae443a6f50ff8074ba486b" },
   "cmp-nvim-lsp": { "branch": "main", "commit": "44b16d11215dce86f253ce0c30949813c0a90765" },
@@ -6,6 +7,7 @@
   "copilot.vim": { "branch": "release", "commit": "309b3c803d1862d5e84c7c9c5749ae04010123b8" },
   "fidget.nvim": { "branch": "main", "commit": "0ba1e16d07627532b6cae915cc992ecac249fb97" },
   "friendly-snippets": { "branch": "main", "commit": "43727c2ff84240e55d4069ec3e6158d74cb534b6" },
+  "gen.nvim": { "branch": "main", "commit": "41ad952c8269fa7aa3a4b8a5abb44541cb628313" },
   "gitsigns.nvim": { "branch": "main", "commit": "af0f583cd35286dd6f0e3ed52622728703237e50" },
   "go.nvim": { "branch": "master", "commit": "c4819d1625c59747d4a6c3de8c86e62bc2ca84ab" },
   "goto-preview": { "branch": "main", "commit": "b428db4d2a5b7c06e149a020e31b2121fbf57a67" },
@@ -20,6 +22,7 @@
   "mason.nvim": { "branch": "main", "commit": "cd7835b15f5a4204fc37e0aa739347472121a54c" },
   "neo-tree.nvim": { "branch": "main", "commit": "1236db954ce502eb5b340bcdb69aa057cc372e8d" },
   "neodev.nvim": { "branch": "main", "commit": "0491a9c7a1775fa5380ecf38d79de1a32f68cc52" },
+  "neorg": { "branch": "main", "commit": "0ccc3bba3f67f6f8740b86a50aa5e1428327a741" },
   "noice.nvim": { "branch": "main", "commit": "92433164e2f7118d4122c7674c3834d9511722ba" },
   "nui.nvim": { "branch": "main", "commit": "c0c8e347ceac53030f5c1ece1c5a5b6a17a25b32" },
   "null-ls.nvim": { "branch": "main", "commit": "0010ea927ab7c09ef0ce9bf28c2b573fc302f5a7" },
diff --git a/lua/custom/plugins/chatgpt.lua b/lua/custom/plugins/chatgpt.lua
new file mode 100644
index 00000000..228ca231
--- /dev/null
+++ b/lua/custom/plugins/chatgpt.lua
@@ -0,0 +1,185 @@
+return {
+  "jackMort/ChatGPT.nvim",
+  event = "VeryLazy",
+  config = function()
+    require("chatgpt").setup({
+      api_key_cmd = "pass chatgpt",
+      yank_register = "+",
= "+", + edit_with_instructions = { + diff = false, + keymaps = { + close = "", + accept = "", + toggle_diff = "", + toggle_settings = "", + toggle_help = "", + cycle_windows = "", + use_output_as_input = "", + }, + }, + chat = { + welcome_message = WELCOME_MESSAGE, + loading_text = "Loading, please wait ...", + question_sign = "", -- 🙂 + answer_sign = "ﮧ", -- 🤖 + border_left_sign = "", + border_right_sign = "", + max_line_length = 120, + sessions_window = { + active_sign = "  ", + inactive_sign = "  ", + current_line_sign = "", + border = { + style = "rounded", + text = { + top = " Sessions ", + }, + }, + win_options = { + winhighlight = "Normal:Normal,FloatBorder:FloatBorder", + }, + }, + keymaps = { + close = "", + yank_last = "", + yank_last_code = "", + scroll_up = "", + scroll_down = "", + new_session = "", + cycle_windows = "", + cycle_modes = "", + next_message = "", + prev_message = "", + select_session = "", + rename_session = "r", + delete_session = "d", + draft_message = "", + edit_message = "e", + delete_message = "d", + toggle_settings = "", + toggle_sessions = "", + toggle_help = "", + toggle_message_role = "", + toggle_system_role_open = "", + stop_generating = "", + }, + }, + popup_layout = { + default = "center", + center = { + width = "80%", + height = "80%", + }, + right = { + width = "30%", + width_settings_open = "50%", + }, + }, + popup_window = { + border = { + highlight = "FloatBorder", + style = "rounded", + text = { + top = " ChatGPT ", + }, + }, + win_options = { + wrap = true, + linebreak = true, + foldcolumn = "1", + winhighlight = "Normal:Normal,FloatBorder:FloatBorder", + }, + buf_options = { + filetype = "markdown", + }, + }, + system_window = { + border = { + highlight = "FloatBorder", + style = "rounded", + text = { + top = " SYSTEM ", + }, + }, + win_options = { + wrap = true, + linebreak = true, + foldcolumn = "2", + winhighlight = "Normal:Normal,FloatBorder:FloatBorder", + }, + }, + popup_input = { + prompt = "  ", + border = { + highlight = "FloatBorder", + style = "rounded", + text = { + top_align = "center", + top = " Prompt ", + }, + }, + win_options = { + winhighlight = "Normal:Normal,FloatBorder:FloatBorder", + }, + submit = "", + submit_n = "", + max_visible_lines = 20, + }, + settings_window = { + setting_sign = "  ", + border = { + style = "rounded", + text = { + top = " Settings ", + }, + }, + win_options = { + winhighlight = "Normal:Normal,FloatBorder:FloatBorder", + }, + }, + help_window = { + setting_sign = "  ", + border = { + style = "rounded", + text = { + top = " Help ", + }, + }, + win_options = { + winhighlight = "Normal:Normal,FloatBorder:FloatBorder", + }, + }, + openai_params = { + model = "gpt-3.5-turbo", + -- model = "gpt-4", + frequency_penalty = 0, + presence_penalty = 0, + max_tokens = 300, + temperature = 0, + top_p = 1, + n = 1, + }, + openai_edit_params = { + model = "gpt-3.5-turbo", + frequency_penalty = 0, + presence_penalty = 0, + temperature = 0, + top_p = 1, + n = 1, + }, + use_openai_functions_for_edits = false, + actions_paths = {}, + show_quickfixes_cmd = "Trouble quickfix", + predefined_chat_gpt_prompts = "https://raw.githubusercontent.com/f/awesome-chatgpt-prompts/main/prompts.csv", + highlights = { + help_key = "@symbol", + help_description = "@comment", + }, + }) + end, + dependencies = { + "MunifTanjim/nui.nvim", + "nvim-lua/plenary.nvim", + "nvim-telescope/telescope.nvim" + } +} diff --git a/lua/custom/plugins/mardown_preview.lua b/lua/custom/plugins/mardown_preview.lua index 447d2c2c..ab913a36 
--- a/lua/custom/plugins/mardown_preview.lua
+++ b/lua/custom/plugins/mardown_preview.lua
@@ -1,9 +1,27 @@
 return {
-    "iamcco/markdown-preview.nvim",
-    config = function()
-        vim.fn["mkdp#util#install"]()
+  "iamcco/markdown-preview.nvim",
+  config = function()
+    vim.fn["mkdp#util#install"]()
+    vim.keymap.set("n", "<leader>m", "<cmd>MarkdownPreview<CR>")
+    vim.keymap.set("n", "<leader>mn", "<cmd>MarkdownPreviewStop<CR>")
+    vim.g.mkdp_markdown_css = '~/markdown.css'
+    vim.g.mkdp_highlight_css = ''
 
-        vim.keymap.set("n", "<leader>m", "<cmd>MarkdownPreview<CR>")
-        vim.keymap.set("n", "<leader>mn", "<cmd>MarkdownPreviewStop<CR>")
-    end,
+    local mkdp_preview_options = {
+      mkit = {},
+      katex = {},
+      uml = {},
+      maid = {},
+      disable_sync_scroll = 0,
+      sync_scroll_type = 'middle',
+      hide_yaml_meta = 1,
+      sequence_diagrams = {},
+      flowchart_diagrams = {},
+      content_editable = true,
+      disable_filename = 1,
+      toc = {}
+    }
+
+    vim.g.mkdp_preview_options = mkdp_preview_options
+  end,
 }
diff --git a/lua/custom/plugins/norg.lua b/lua/custom/plugins/norg.lua
new file mode 100644
index 00000000..da9843b9
--- /dev/null
+++ b/lua/custom/plugins/norg.lua
@@ -0,0 +1,35 @@
+return {
+  "nvim-neorg/neorg",
+  build = ":Neorg sync-parsers",
+  dependencies = { "nvim-lua/plenary.nvim" },
+  config = function()
+    require("neorg").setup {
+      load = {
+        ["core.export"] = {},
+        ["core.export.markdown"] = {
+          config = {
+            extension = "md",
+          },
+        },
+        ["core.defaults"] = {},
+        ["core.concealer"] = {},
+        ["core.dirman"] = {
+          config = {
+            workspaces = {
+              notes = "~/notes",
+            },
+            default_workspace = "notes",
+          },
+        },
+        ["core.completion"] = {
+          config = {
+            engine = "nvim-cmp",
+          },
+        },
+      },
+    }
+
+    vim.wo.foldlevel = 99
+    vim.wo.conceallevel = 2
+  end,
+}
diff --git a/lua/custom/plugins/ollama.lua b/lua/custom/plugins/ollama.lua
new file mode 100644
index 00000000..b16d20cb
--- /dev/null
+++ b/lua/custom/plugins/ollama.lua
@@ -0,0 +1,39 @@
+return {
+  "David-Kunz/gen.nvim",
+  config = function()
+    require('gen').prompts['Elaborate_Text'] = {
+      prompt = "Elaborate the following text:\n$text",
+      replace = true
+    }
+    require('gen').prompts['Golang'] = {
+      prompt = "You are a senior Golang engineer, acting as an assistant. You offer help with backend technologies: \
+        mongodb, gorilla/mux, algorithms, data structures, azure cloud, terraform. You answer with code examples when \
+        possible. $input:\n$text",
+      replace = true
+    }
+
+    require('gen').setup({
+      model = "codellama", -- The default model to use.
+      display_mode = 'split',
+    })
+
+    vim.keymap.set({ 'n', 'v' }, '<leader>]', ':Gen<CR>')
+    vim.keymap.set('v', '<leader>]', ':Gen Enhance_Grammar_Spelling<CR>')
+  end
+  -- opts = {
+  --   model = "mistral", -- The default model to use.
+  --   display_mode = "float", -- The display mode. Can be "float" or "split".
+  --   show_prompt = false, -- Shows the Prompt submitted to Ollama.
+  --   show_model = false, -- Displays which model you are using at the beginning of your chat session.
+  --   no_auto_close = false, -- Never closes the window automatically.
+  --   init = function(options) pcall(io.popen, "ollama serve > /dev/null 2>&1 &") end,
+  --   -- Function to initialize Ollama
+  --   command = "curl --silent --no-buffer -X POST http://localhost:11434/api/generate -d $body",
+  --   -- The command for the Ollama service. You can use placeholders $prompt, $model and $body (shellescaped).
+  --   -- This can also be a lua function returning a command string, with options as the input parameter.
+  --   -- The executed command must return a JSON object with { response, context }
+  --   -- (context property is optional).
+  --   -- list_models = '', -- Retrieves a list of model names
+  --   debug = false -- Prints errors and the command which is run.
+  -- }
+}