-
Notifications
You must be signed in to change notification settings - Fork 33
New issue
Have a question about this project? Sign up for a free GitHub account to open an issue and contact its maintainers and the community.
By clicking “Sign up for GitHub”, you agree to our terms of service and privacy statement. We’ll occasionally send you account related emails.
Already on GitHub? Sign in to your account
Debounce, LlamaCpp support, expose prompt as setup option, fix passing parameters to model (ollama) #11
base: main
Are you sure you want to change the base?
Debounce, LlamaCpp support, expose prompt as setup option, fix passing parameters to model (ollama) #11
Changes from all commits
d04b8f4
edb3d59
71d051e
79d4b29
6018a42
1dc3cb4
490e5eb
7910988
41dda5b
865035f
a69f521
30e63f0
071d701
2f8e1b0
4442111
84f2fea
9633b8e
File filter
Filter by extension
Conversations
Jump to
Diff view
Diff view
There are no files selected for viewing
Original file line number | Diff line number | Diff line change |
---|---|---|
@@ -0,0 +1,58 @@ | ||
local requests = require('cmp_ai.requests') | ||
|
||
-- Fix: declare the backend table as a file-local instead of leaking a global.
local DocileLlamaCpp = requests:new(nil)
|
||
|
||
--- Constructor.  Turns `o` into the instance and merges it over the defaults.
-- @param o      table of user options (may be nil); becomes the instance.
-- @param params unused legacy argument kept for call-site compatibility —
--               options are read from `o` (see the o-vs-params PR discussion).
function DocileLlamaCpp:new(o, params)
  o = o or {}
  setmetatable(o, self)
  self.__index = self
  -- 'keep': user-supplied values in `o` win over these defaults.
  -- NOTE(review): this is stored on the class table, not the instance;
  -- fine for the usual single-provider setup, shared otherwise.
  self.params = vim.tbl_deep_extend('keep', o, {
    base_url = 'http://localhost:5000/forward',
    -- model = 'codellama:7b-code',
    options = {
      temperature = 0.2,
    },
  })
  return o
end
|
||
--- Request a completion for the code around the cursor.
-- @param lines_before string: text before the cursor
-- @param lines_after  string: text after the cursor
-- @param cb           function(table): receives a list of completion strings
function DocileLlamaCpp:complete(lines_before, lines_after, cb)
  local data = {
    -- model = self.params.model,
    -- prompt = '<PRE> ' .. lines_before .. ' <SUF>' .. lines_after .. ' <MID>', -- for codellama
    -- Default FIM prompt (deepseek-coder style); used unless the user
    -- configured a `prompt` builder in setup().
    prompt = '<s><|fim▁begin|>' .. lines_before .. '<|fim▁hole|>' .. lines_after .. '<|fim▁end|>',
    stream = false,
  }
  data = vim.tbl_extend('keep', data, self.params.options)
  -- Fix: only override the default prompt when a user prompt builder is
  -- present; the previous unconditional call crashed ("attempt to call a
  -- nil value") when `prompt` was not configured.
  if type(self.params.prompt) == 'function' then
    data.prompt = self.params.prompt(lines_before, lines_after)
  end

  self:Get(self.params.base_url, {}, data, function(answer)
    local new_data = {}
    if answer.error ~= nil then
      vim.notify('Docile error: ' .. answer.error)
      return
    end
    if answer.stop then
      local result = answer.content:gsub('<EOT>', '')
      -- CodeQwen always prefixes the completion with one spurious space;
      -- strip the first character for that model only.  Guard the lookup:
      -- not every server response carries generation_settings.
      local settings = answer.generation_settings
      if settings and settings.model and string.find(settings.model, 'CodeQwen') then
        result = result:gsub('^.', '')
      end
      table.insert(new_data, result)
    end
    cb(new_data)
  end)
end
|
||
--- Manual smoke test: request a completion around a tiny Python snippet
--- and dump whatever the backend returns.
function DocileLlamaCpp:test()
  local before = 'def factorial(n)\n if'
  local after = ' return ans\n'
  self:complete(before, after, function(data)
    dump(data)
  end)
end
|
||
return DocileLlamaCpp |
Original file line number | Diff line number | Diff line change |
---|---|---|
@@ -0,0 +1,51 @@ | ||
local requests = require('cmp_ai.requests') | ||
|
||
-- Fix: declare the backend table as a file-local instead of leaking a global.
local LlamaCpp = requests:new(nil)
|
||
--- Constructor.  Turns `o` into the instance and merges it over the defaults.
-- @param o      table of user options (may be nil); becomes the instance.
-- @param params unused legacy argument kept for call-site compatibility —
--               options are read from `o` (see the o-vs-params PR discussion).
function LlamaCpp:new(o, params)
  o = o or {}
  setmetatable(o, self)
  self.__index = self
  -- 'keep': user-supplied values in `o` win over these defaults.
  -- NOTE(review): this is stored on the class table, not the instance;
  -- fine for the usual single-provider setup, shared otherwise.
  self.params = vim.tbl_deep_extend('keep', o, {
    base_url = 'http://localhost:8080/completion',
    -- model = 'codellama:7b-code',
    options = {
      temperature = 0.2,
    },
  })
  return o
end
|
||
--- Request a completion for the code around the cursor.
-- @param lines_before string: text before the cursor
-- @param lines_after  string: text after the cursor
-- @param cb           function(table): receives a list of completion strings
function LlamaCpp:complete(lines_before, lines_after, cb)
  local data = {
    -- model = self.params.model,
    -- prompt = '<PRE> ' .. lines_before .. ' <SUF>' .. lines_after .. ' <MID>', -- for codellama
    -- Default FIM prompt (deepseek-coder style); used unless the user
    -- configured a `prompt` builder in setup().
    prompt = '<s><|fim▁begin|>' .. lines_before .. '<|fim▁hole|>' .. lines_after .. '<|fim▁end|>',
    stream = false,
  }
  data = vim.tbl_extend('keep', data, self.params.options)
  -- Fix: only override the default prompt when a user prompt builder is
  -- present; the previous unconditional call crashed ("attempt to call a
  -- nil value") when `prompt` was not configured.
  if type(self.params.prompt) == 'function' then
    data.prompt = self.params.prompt(lines_before, lines_after)
  end

  self:Get(self.params.base_url, {}, data, function(answer)
    local new_data = {}
    if answer.error ~= nil then
      -- Fix: typo in user-facing message ('Llamacp' -> 'LlamaCpp').
      vim.notify('LlamaCpp error: ' .. answer.error)
      return
    end
    if answer.stop then
      local result = answer.content:gsub('<EOT>', '')
      table.insert(new_data, result)
    end
    cb(new_data)
  end)
end
|
||
--- Manual smoke test: request a completion around a tiny Python snippet
--- and dump whatever the backend returns.
function LlamaCpp:test()
  local before = 'def factorial(n)\n if'
  local after = ' return ans\n'
  self:complete(before, after, function(data)
    dump(data)
  end)
end
|
||
return LlamaCpp |
Original file line number | Diff line number | Diff line change |
---|---|---|
|
@@ -40,19 +40,70 @@ function Source:_do_complete(ctx, cb) | |
local service = conf:get('provider') | ||
service:complete(before, after, function(data) | ||
self:end_complete(data, ctx, cb) | ||
if conf:get('notify') then | ||
conf:get('notify_callback')('Completion started') | ||
end | ||
-- why 2x ? | ||
-- if conf:get('notify') then | ||
-- conf:get('notify_callback')('Completion started') | ||
-- end | ||
end) | ||
end | ||
|
||
-- NOTE(review): debouncing is implemented in this file rather than via
-- cmp's built-in debounce, which the reviewers agreed does not work as
-- it should (see PR discussion).
function Source:trigger(ctx, callback)
  -- Only fire completions while actually in insert mode.
  if vim.fn.mode() == 'i' then
    self:_do_complete(ctx, callback)
  end
end
|
||
-- based on https://github.com/runiq/neovim-throttle-debounce/blob/main/lua/throttle-debounce/init.lua (MIT)
--- Trailing-edge debounce: returns a wrapper that runs `fn` only after
--- `ms` milliseconds with no further calls, plus the underlying timer so
--- callers can cancel a pending run (e.g. on InsertLeave).
-- @param fn function to debounce
-- @param ms number: delay in milliseconds
-- @return wrapped function, uv timer handle
local function debounce_trailing(fn, ms)
  local timer = vim.loop.new_timer()
  local wrapped_fn

  function wrapped_fn(...)
    local argv = { ... }
    local argc = select('#', ...)
    -- Restarting an active uv timer simply reschedules it, so no explicit
    -- timer:stop() is needed here.
    timer:start(ms, 0, vim.schedule_wrap(function()
      -- Fix: pcall wraps the actual invocation of `fn`.  The previous
      -- pcall(vim.schedule_wrap(fn), ...) only protected the scheduling
      -- step; errors raised inside `fn` escaped unprotected.
      pcall(fn, unpack(argv, 1, argc))
    end))
  end
  return wrapped_fn, timer
end
|
||
-- Debounced entry point: Source.trigger fires only after `debounce_delay`
-- milliseconds of inactivity.  The timer handle is kept so the pending run
-- can be cancelled when insert mode is left.
local bounce_complete, bounce_timer = debounce_trailing(Source.trigger, conf:get('debounce_delay'))

-- Last completion context; captured by Source:complete and replayed by the
-- autocmds below.
local self_cp, ctx_cp, call_cp

local bounce_autogroup = vim.api.nvim_create_augroup('BounceCompletion', { clear = true })

vim.api.nvim_create_autocmd({ 'TextChangedI', 'InsertEnter', 'TextChangedP' }, {
  pattern = '*',
  group = bounce_autogroup,
  callback = function()
    -- Replay only once a completion context has been recorded.
    if self_cp ~= nil then
      bounce_complete(self_cp, ctx_cp, call_cp)
    end
  end,
})

vim.api.nvim_create_autocmd({ 'InsertLeave' }, {
  pattern = '*',
  group = bounce_autogroup,
  callback = function()
    -- Cancel any pending debounced completion when leaving insert mode.
    bounce_timer:stop()
  end,
})
|
||
|
||
--- complete: nvim-cmp entry point.  Records the completion context and
--- kicks the debounced trigger instead of completing immediately.
function Source:complete(ctx, callback)
  -- Bail out early for filetypes the user asked to ignore.
  if conf:get('ignored_file_types')[vim.bo.filetype] then
    callback()
    return
  end
  -- Remember this context so the TextChanged*/InsertEnter autocmds can
  -- re-fire the debounced completion later.
  self_cp, ctx_cp, call_cp = self, ctx, callback
  bounce_complete(self, ctx, callback)
end
|
||
function Source:end_complete(data, ctx, cb) | ||
|
There was a problem hiding this comment.
Choose a reason for hiding this comment
The reason will be displayed to describe this comment to others. Learn more.
Does this bug apply to the `openai` or `bard` backends as well? They similarly use `params` instead of `o`.
There was a problem hiding this comment.
Choose a reason for hiding this comment
The reason will be displayed to describe this comment to others. Learn more.
I do not know, since I have not used Bard or OpenAI. I can only assume that if they work correctly, then this line is not needed.