From abeeda05ce37018b1e5dc64a37350385677f5cb6 Mon Sep 17 00:00:00 2001 From: root Date: Tue, 9 Sep 2025 08:41:51 +0000 Subject: [PATCH] update config --- init.lua | 196 +++++++++++++++++++++---------------------------------- 1 file changed, 76 insertions(+), 120 deletions(-) diff --git a/init.lua b/init.lua index b4c697c1..427b2eee 100644 --- a/init.lua +++ b/init.lua @@ -1116,126 +1116,6 @@ require('lazy').setup({ vim.g.db_ui_use_nerd_fonts = 1 end, }, - { - 'yacineMTB/dingllm.nvim', - dependencies = { 'nvim-lua/plenary.nvim' }, - config = function() - local system_prompt = - 'You should replace the code that you are sent, only following the comments. Do not talk at all. Only output valid code. Do not provide any backticks that surround the code. Never ever output backticks like this ```. Any comment that is asking you for something should be removed after you satisfy them. Other comments should left alone. Do not output backticks' - local helpful_prompt = 'You are a helpful assistant. What I have sent are my notes so far.' - local dingllm = require 'dingllm' - - local function handle_open_router_spec_data(data_stream) - local success, json = pcall(vim.json.decode, data_stream) - if success then - if json.choices and json.choices[1] and json.choices[1].text then - local content = json.choices[1].text - if content then - dingllm.write_string_at_cursor(content) - end - end - else - print('non json ' .. data_stream) - end - end - - local function custom_make_openai_spec_curl_args(opts, prompt) - local url = opts.url - local api_key = opts.api_key_name and os.getenv(opts.api_key_name) - local data = { - prompt = prompt, - model = opts.model, - temperature = 0.7, - stream = true, - } - local args = { '-N', '-X', 'POST', '-H', 'Content-Type: application/json', '-d', vim.json.encode(data) } - if api_key then - table.insert(args, '-H') - table.insert(args, 'Authorization: Bearer ' .. 
api_key) - end - table.insert(args, url) - return args - end - - local function llama_405b_base() - dingllm.invoke_llm_and_stream_into_editor({ - url = 'https://openrouter.ai/api/v1/chat/completions', - model = 'meta-llama/llama-3.1-405b', - api_key_name = 'OPENROUTER_API_KEY', - max_tokens = '128', - replace = false, - }, custom_make_openai_spec_curl_args, handle_open_router_spec_data) - end - - local function groq_replace() - dingllm.invoke_llm_and_stream_into_editor({ - url = 'https://api.groq.com/openai/v1/chat/completions', - model = 'llama-3.1-70b-versatile', - api_key_name = 'GROQ_API_KEY', - system_prompt = system_prompt, - replace = true, - }, dingllm.make_openai_spec_curl_args, dingllm.handle_openai_spec_data) - end - - local function groq_help() - dingllm.invoke_llm_and_stream_into_editor({ - url = 'https://api.groq.com/openai/v1/chat/completions', - model = 'llama-3.1-70b-versatile', - api_key_name = 'GROQ_API_KEY', - system_prompt = helpful_prompt, - replace = false, - }, dingllm.make_openai_spec_curl_args, dingllm.handle_openai_spec_data) - end - - local function llama405b_replace() - dingllm.invoke_llm_and_stream_into_editor({ - url = 'https://api.lambdalabs.com/v1/chat/completions', - model = 'hermes-3-llama-3.1-405b-fp8', - api_key_name = 'LAMBDA_API_KEY', - system_prompt = system_prompt, - replace = true, - }, dingllm.make_openai_spec_curl_args, dingllm.handle_openai_spec_data) - end - - local function llama405b_help() - dingllm.invoke_llm_and_stream_into_editor({ - url = 'https://api.lambdalabs.com/v1/chat/completions', - model = 'hermes-3-llama-3.1-405b-fp8', - api_key_name = 'LAMBDA_API_KEY', - system_prompt = helpful_prompt, - replace = false, - }, dingllm.make_openai_spec_curl_args, dingllm.handle_openai_spec_data) - end - - local function anthropic_help() - dingllm.invoke_llm_and_stream_into_editor({ - url = 'https://api.anthropic.com/v1/messages', - model = 'claude-3-5-sonnet-20240620', - api_key_name = 'ANTHROPIC_API_KEY', - system_prompt 
= helpful_prompt, - replace = false, - }, dingllm.make_anthropic_spec_curl_args, dingllm.handle_anthropic_spec_data) - end - - local function anthropic_replace() - dingllm.invoke_llm_and_stream_into_editor({ - url = 'https://api.anthropic.com/v1/messages', - model = 'claude-3-5-sonnet-20240620', - api_key_name = 'ANTHROPIC_API_KEY', - system_prompt = system_prompt, - replace = true, - }, dingllm.make_anthropic_spec_curl_args, dingllm.handle_anthropic_spec_data) - end - - -- vim.keymap.set({ 'n', 'v' }, 'k', groq_replace, { desc = 'llm groq' }) - -- vim.keymap.set({ 'n', 'v' }, 'K', groq_help, { desc = 'llm groq_help' }) - -- vim.keymap.set({ 'n', 'v' }, 'L', llama405b_help, { desc = 'llm llama405b_help' }) - -- vim.keymap.set({ 'n', 'v' }, 'l', llama405b_replace, { desc = 'llm llama405b_replace' }) - vim.keymap.set({ 'n', 'v' }, 'I', anthropic_help, { desc = 'llm anthropic_help' }) - vim.keymap.set({ 'n', 'v' }, 'i', anthropic_replace, { desc = 'llm anthropic' }) - -- vim.keymap.set({ 'n', 'v' }, 'o', llama_405b_base, { desc = 'llama base' }) - end, - }, { 'folke/snacks.nvim', priority = 1000, @@ -1527,6 +1407,82 @@ require('lazy').setup({ desc = 'Explain code near cursor', }, }, + + { + 'dmtrKovalenko/fff.nvim', + build = 'cargo build --release', + -- or if you are using nixos + -- build = "nix run .#release", + opts = { -- (optional) + debug = { + enabled = true, -- we expect your collaboration at least during the beta + show_scores = true, -- to help us optimize the scoring system, feel free to share your scores! + }, + }, + -- No need to lazy-load with lazy.nvim. + -- This plugin initializes itself lazily. 
+ lazy = false, + keys = { + { + 'ff', -- try it if you didn't — it is a banger keybinding for a picker + function() + require('fff').find_files() + end, + desc = 'FFFind files', + }, + }, + }, + { + 'oribarilan/lensline.nvim', + tag = '1.1.0', -- or: branch = 'release/1.x' for latest non-breaking updates + event = 'LspAttach', + config = function() + require('lensline').setup { + providers = { -- Array format: order determines display sequence + { + name = 'references', + enabled = true, -- enable references provider + quiet_lsp = true, -- suppress noisy LSP log messages (e.g., Pyright reference spam) + }, + { + name = 'last_author', + enabled = true, -- enabled by default with caching optimization + cache_max_files = 50, -- maximum number of files to cache blame data for (default: 50) + }, + -- built-in providers that are disabled by default: + { + name = 'diagnostics', + enabled = false, -- disabled by default - enable explicitly to use + min_level = 'WARN', -- only show WARN and ERROR by default (HINT, INFO, WARN, ERROR) + }, + { + name = 'complexity', + enabled = true, -- disabled by default - enabled explicitly here + min_level = 'L', -- only show L (Large) and XL (Extra Large) complexity by default + }, + }, + style = { + separator = ' • ', -- separator between all lens attributes + highlight = 'Comment', -- highlight group for lens text + prefix = '┃ ', -- prefix before lens content + placement = 'above', -- "above" | "inline" - where to render lenses (consider prefix = "" for inline) + use_nerdfont = true, -- enable nerd font icons in built-in providers + }, + render = 'all', -- "all" | "focused" (only active window's focused function) + limits = { + exclude = { + -- see config.lua for extensive list of default patterns + }, + exclude_gitignored = true, -- respect .gitignore by not processing ignored files + max_lines = 1000, -- process only first N lines of large files + max_lenses = 70, -- skip rendering if too many lenses generated + }, + debounce_ms = 
500, -- unified debounce delay for all providers + focused_debounce_ms = 150, -- debounce delay for focus tracking in focused mode + debug_mode = false, -- enable debug output for development, see CONTRIBUTE.md + } + end, + }, }, -- The following comments only work if you have downloaded the kickstart repo, not just copy pasted the