diff --git a/README.md b/README.md
index 1136b1f..c4e2748 100644
--- a/README.md
+++ b/README.md
@@ -4,8 +4,10 @@ your friendly ai assistant. frontend for ollama.
 > fair warning: lydia is very stupid
 
 # Installation
+- git clone repository
 - install ollama from `https://ollama.ai/download`
-- pull a model from ollama
-- copy config.example.toml to config.toml and edit it to have your model
+- pull a model from ollama (i recommend gemma3n:e4b for weak PCs like mine (i7-10750h + rtx 3050ti laptop edition))
+- copy config.example.toml to config.toml and edit it to have the model you selected, optionally set your name in [user]
+- npm i
 - node index.js
 - enjoy
diff --git a/lydia.js b/lydia.js
index f87e1fd..3242a73 100644
--- a/lydia.js
+++ b/lydia.js
@@ -4,6 +4,17 @@ import { execSync } from "child_process";
 import toml from "toml";
 import fs from "fs";
 
+if (!fs.existsSync("./config.toml")) {
+  if (fs.existsSync("./config.example.toml")) {
+    fs.copyFileSync("./config.example.toml", "./config.toml");
+  } else {
+    console.error(
+      "error: config.example.toml not found. git pull from repository",
+    );
+    process.exit(1);
+  }
+}
+
 const config = toml.parse(fs.readFileSync("./config.toml", "utf-8"));
 
 const assistantname = config.assistant.name;
@@ -238,7 +249,7 @@ inputBox.on("submit", async (text) => {
     case "help":
       addMessage(
         assistantname,
-        "available commands:\nl!help - if you wanna know what i can do, run this\nl!clear - clear chat history, if you want me to forget everything, just run this!\nl!face - if you want to force my expression, here you go! not sure i'll be too happy about it though.",
+        "available commands:\nl!help - if you wanna know what i can do, run this!\nl!clear - clear chat history, if you want me to forget everything, just run this!\nl!face - if you want to force my expression, here you go! not sure i'll be too happy about it though.",
       );
       break;
     case "clear":
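
The README step above asks you to edit config.toml so it names the model you pulled, and optionally to set your name in [user]. A minimal sketch of what the edited file could look like follows; only config.assistant.name is confirmed by the lydia.js code in this diff, so treat the [assistant] model key and the [user] name key as assumptions and copy the real key names from config.example.toml in the repository.

    [assistant]
    name = "lydia"          # read by lydia.js as config.assistant.name
    model = "gemma3n:e4b"   # assumed key: the model you pulled with ollama

    [user]
    name = "your name"      # assumed key, optional, per "set your name in [user]"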