add chatgpt, norg and gen plugins
parent da79e8d990
commit 793143c6d2
@@ -1,9 +1,27 @@
 return {
   "iamcco/markdown-preview.nvim",
   config = function()
     vim.fn["mkdp#util#install"]()
-
     vim.keymap.set("n", "<leader>m", "<CMD>MarkdownPreview<CR>")
     vim.keymap.set("n", "<leader>mn", "<CMD>MarkdownPreviewStop<CR>")
+    vim.g.mkdp_markdown_css = '~/markdown.css'
+    vim.g.mkdp_highlight_css = ''
+
+    local mkdp_preview_options = {
+      mkit = {},
+      katex = {},
+      uml = {},
+      maid = {},
+      disable_sync_scroll = 0,
+      sync_scroll_type = 'middle',
+      hide_yaml_meta = 1,
+      sequence_diagrams = {},
+      flowchart_diagrams = {},
+      content_editable = true,
+      disable_filename = 1,
+      toc = {}
+    }
+
+    vim.g.mkdp_preview_options = mkdp_preview_options
   end,
 }
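Each spec in this commit returns a lazy.nvim-style plugin table. A minimal sketch of how such specs are typically loaded, assuming the files sit under lua/plugins/ and lazy.nvim is the plugin manager (neither is shown in this commit):

  -- sketch, not part of the commit: lazy.nvim imports every spec
  -- returned from lua/plugins/*.lua
  require("lazy").setup("plugins")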
@@ -0,0 +1,35 @@
+return {
+  "nvim-neorg/neorg",
+  build = ":Neorg sync-parsers",
+  dependencies = { "nvim-lua/plenary.nvim" },
+  config = function()
+    require("neorg").setup {
+      load = {
+        ["core.export"] = {},
+        ["core.export.markdown"] = {
+          config = {
+            extension = "md",
+          },
+        },
+        ["core.defaults"] = {},
+        ["core.concealer"] = {},
+        ["core.dirman"] = {
+          config = {
+            workspaces = {
+              notes = "~/notes",
+            },
+            default_workspace = "notes",
+          },
+        },
+        ["core.completion"] = {
+          config = {
+            engine = "nvim-cmp",
+          },
+        },
+      },
+    }
+
+    vim.wo.foldlevel = 99
+    vim.wo.conceallevel = 2
+  end,
+}
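With core.dirman configured as above, ~/notes is the default workspace and core.export.markdown handles markdown export. A hedged sketch of driving both from Lua (the :Neorg subcommands are from the neorg docs; the output file name is hypothetical):

  -- sketch, assuming a .norg buffer is open in the 'notes' workspace
  vim.cmd("Neorg workspace notes")          -- switch to the workspace configured above
  vim.cmd("Neorg export to-file note.md")   -- export the current buffer as markdown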
@@ -0,0 +1,39 @@
+return {
+  "David-Kunz/gen.nvim",
+  config = function()
+    require('gen').prompts['Elaborate_Text'] = {
+      prompt = "Elaborate the following text:\n$text",
+      replace = true
+    }
+    require('gen').prompts['Golang'] = {
+      prompt = "You are a senior Golang engineer, acting as an assistant. You offer help with backend technologies: \
+mongodb, gorilla/mux, algorithms, data structures, azure cloud, terraform. You answer with code examples when \
+possible. $input:\n$text",
+      replace = true
+    }
+
+    require('gen').setup({
+      model = "codellama", -- The default model to use.
+      display_mode = 'split',
+    })
+
+    vim.keymap.set({ 'n', 'v' }, '<leader>]', ':Gen<CR>')
+    vim.keymap.set('v', '<leader>]', ':Gen Enhance_Grammar_Spelling<CR>') -- overrides the visual-mode half of the mapping above
+  end,
+  -- opts = {
+  --   model = "mistral", -- The default model to use.
+  --   display_mode = "float", -- The display mode. Can be "float" or "split".
+  --   show_prompt = false, -- Shows the prompt submitted to Ollama.
+  --   show_model = false, -- Displays which model you are using at the beginning of your chat session.
+  --   no_auto_close = false, -- Never closes the window automatically.
+  --   init = function(options) pcall(io.popen, "ollama serve > /dev/null 2>&1 &") end,
+  --   -- Function to initialize Ollama
+  --   command = "curl --silent --no-buffer -X POST http://localhost:11434/api/generate -d $body",
+  --   -- The command for the Ollama service. You can use placeholders $prompt, $model and $body (shellescaped).
+  --   -- This can also be a Lua function returning a command string, with options as the input parameter.
+  --   -- The executed command must return a JSON object with { response, context }
+  --   -- (context property is optional).
+  --   -- list_models = '<omitted lua function>', -- Retrieves a list of model names
+  --   debug = false -- Prints errors and the command which is run.
+  -- }
+}
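As written, the second '<leader>]' mapping shadows the first in visual mode, so the plain :Gen picker is only reachable from normal mode. One possible fix, an assumption rather than part of the commit, is to give the grammar prompt its own key:

  -- sketch: '<leader>[' is a made-up binding; any free visual-mode key works
  vim.keymap.set('v', '<leader>[', ':Gen Enhance_Grammar_Spelling<CR>')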