add some more configuration

This commit is contained in:
Eri Ishihara 2025-07-18 23:15:02 +02:00
parent 5dff3f9f68
commit 0c56817d78
3 changed files with 18 additions and 0 deletions

View file

@ -26,6 +26,14 @@ lydia is written to be easily configurable through a toml file which is easier t
## User settings
- name = the name you want lydia to call you. default is "user"
## Advanced configuration
(I wouldn't touch this unless you know what you're doing)
- temperature = the temperature you want lydia to use. basically how random the model is. default is 0.8
- max_tokens = the max number of tokens lydia may generate per response (passed to the model as num_predict). default is 8192
## Runtime configuration
the prompt can be changed by running l!prompt <text> in the chatbox. this only applies for the current session; if you want a persistent change, edit the config file instead.
# Other stuff
by hitting escape you can tab out of the chatbox, here you can do cool things like:
- hit Q or CTRL+C to quit lydia (but why would you wanna do that anyway?)

View file

@ -9,3 +9,7 @@ facefont = "mono9"
# user-facing settings
[user]
# the name lydia calls you (default "user")
name = "user"
# advanced model knobs -- leave at defaults unless you know what you're doing
[advanced]
# sampling temperature: higher = more random replies
temperature = 0.8
# token limit passed to the model (see README for details)
max_tokens = 8192

View file

@ -20,6 +20,8 @@ const config = toml.parse(fs.readFileSync("./config.toml", "utf-8"));
// Bindings read from config.toml (parsed into `config` above).
// The [advanced] and [user] sections use optional chaining with nullish
// defaults so that config files written before those sections existed
// don't crash on startup; fallbacks mirror the defaults documented in
// the README (temperature 0.8, max_tokens 8192, name "user").
let assistantname = config.assistant.name;
let assistantface = config.assistant.assistantface;
let assistantmodel = config.assistant.model;
let maxtokens = config.advanced?.max_tokens ?? 8192; // generation cap (sent as ollama num_predict)
let temperature = config.advanced?.temperature ?? 0.8; // sampling randomness
let username = config.user?.name ?? "user";
@ -226,6 +228,10 @@ async function sendMessage(message) {
...conversationHistory,
],
stream: true,
options: {
num_predict: maxtokens,
temperature: temperature,
},
});
for await (const part of response) {