mirror of https://github.com/LazyVim/starter
pull/111/head
parent abfea67128
commit 3d8164bdda
@@ -60,6 +60,22 @@ return {
       temperature = 0,
       max_tokens = 4096,
     },
+    openai = {
+      endpoint = "https://api.openai.com/v1",
+      model = "gpt-4o",
+      temperature = 0,
+      max_tokens = 4096,
+    },
+    vendors = {
+      ollama = {
+        __inherited_from = "openai",
+        api_key_name = "",
+        endpoint = "http://127.0.0.1:11434/v1",
+        model = "qwen2.5-coder:7b",
+        -- model = "codellama:7b",
+        -- model = "deepseek-r1:7b",
+      },
+    },
     file_selector = {
       provider = "fzf",
       provider_opts = {},
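The hunk above adds a hosted OpenAI provider plus a local Ollama entry under vendors that inherits the OpenAI-compatible request shape (__inherited_from = "openai") but points at a local endpoint with an empty api_key_name, so no key is required. Below is a minimal sketch of how this fragment might sit inside the full plugin spec; the plugin name and the enclosing spec skeleton are assumptions for illustration and are not part of this commit.

-- Sketch only: assumed avante.nvim-style plugin spec wrapped around the
-- added fragment; everything outside the openai/vendors/file_selector
-- tables is illustrative, not taken from the diff.
return {
  "yetone/avante.nvim", -- assumed plugin; the diff itself names no plugin
  opts = {
    openai = {
      endpoint = "https://api.openai.com/v1",
      model = "gpt-4o",
      temperature = 0,
      max_tokens = 4096,
    },
    vendors = {
      -- a "vendor" reusing the OpenAI-compatible wire format against a
      -- local Ollama server, so no API key is needed
      ollama = {
        __inherited_from = "openai",
        api_key_name = "",
        endpoint = "http://127.0.0.1:11434/v1",
        model = "qwen2.5-coder:7b",
      },
    },
    file_selector = {
      provider = "fzf",
      provider_opts = {},
    },
  },
}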
@@ -83,15 +99,6 @@ return {
       config.provider = "ollama"
       config.auto_suggestions_provider = "ollama"
       config.behaviour.auto_suggestions = true
-      config.vendors = {
-        ollama = {
-          __inherited_from = "openai",
-          api_key_name = "",
-          endpoint = "http://127.0.0.1:11434/v1",
-          model = "qwen2.5-coder:7b",
-          -- model = "deepseek-r1:7b",
-        },
-      }
     end

     return config
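The second hunk drops the conditional config.vendors assignment: with the ollama vendor now declared statically in the first hunk, the branch only has to switch the active provider. A rough sketch of how the remaining logic could read after this change; the use_local_llm flag and the surrounding setup are assumptions, only the three config.* lines and the trailing return come from the diff.

-- Sketch of the remaining branch, assuming a config table built earlier in
-- the same opts function; the guard is hypothetical.
local config = {
  behaviour = {}, -- the static provider/vendor tables from the first hunk would sit here
}

local use_local_llm = true -- hypothetical flag, not shown in the diff
if use_local_llm then
  config.provider = "ollama"
  config.auto_suggestions_provider = "ollama"
  config.behaviour.auto_suggestions = true
  -- config.vendors is intentionally no longer assigned here; the ollama
  -- vendor is declared once in the static opts added by the first hunk
end

return config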