llama_cpp_dir_path: "/home/user/AI/llama.cpp/"
batch_thread_count: 10
thread_count: 6
+ prompts_directory: "/home/user/.config/alyverkko-cli/prompts"
models:
- alias: "default"
filesystem_path: "WizardLM-2-8x22B.Q5_K_M-00001-of-00005.gguf"
filesystem_path: "daringmaid-20b.Q4_K_M.gguf"
context_size_tokens: 4096
end_of_text_marker: null
- prompts:
- - alias: "default"
- prompt: |
- This conversation involves a user and AI assistant where the AI
- is expected to provide not only immediate responses but also detailed and
- well-reasoned analysis. The AI should consider all aspects of the query
- and deliver insights based on logical deductions and comprehensive understanding.
- AI assistant should reply using emacs org-mode syntax.
- Quick recap: *this is bold* [[http://domain.org][This is link]]
- ,* Heading level 1
- ,** Heading level 2
- | Col 1 Row 1 | Col 2 Row 1 |
- | Col 1 Row 2 | Col 2 Row 2 |
- ,#+BEGIN_SRC python
- print ('Hello, world!')
- ,#+END_SRC
-
- - alias: "writer"
- prompt: |
- You are best-selling book writer.
#+end_src
*** Configuration file syntax
- models_directory :: Directory where AI models are stored.
- This option is mandatory.
+- prompts_directory :: Directory where prompts are stored.
+  This option is mandatory.
+
+ Example prompts directory content:
+ #+begin_verse
+ default.txt
+ writer.txt
+ #+end_verse
+
+ The prompt name is the file name without its extension. The file
+ extension should be *txt*.
+
+ Example content for *writer.txt*:
+ : You are a best-selling book writer.
+
- default_temperature :: Defines the default temperature for AI
responses, affecting randomness in the generation process. Lower
values make the AI more deterministic and higher values make it more
identify and remove them so that they don't leak into
conversation. Default value is: *null*.
-- prompts :: List of predefined system prompts for AI.
- - alias :: Short prompt alias. Prompt with alias "default" will be used by default.
- - prompt :: Actual prompt that will be sent to AI alongside actual
- user question.
-
*** Enlisting available models
Once Älyverkko CLI is installed and properly configured, you can run
following command at commandline to see what models are available to
allChecksPassed = false;
}
- // Validate prompts
- if (Main.configuration.getPrompts().isEmpty()) {
- System.err.println("No prompts are defined in the configuration.");
- allChecksPassed = false;
+
+ // Ensure that there is at least one prompt file
+ File promptsDirectory = Main.configuration.getPromptsDirectory();
+ if (promptsDirectory == null) {
+ System.err.println("Prompts directory is not defined in the configuration.");
+ allChecksPassed = false;
+ } else {
+ // Validate prompts directory
+ if (!promptsDirectory.exists() || !promptsDirectory.isDirectory()) {
+ System.err.println("Prompts directory does not exist or is not a directory: " + promptsDirectory);
+ allChecksPassed = false;
+ } else {
+ if (promptsDirectory.listFiles() == null || promptsDirectory.listFiles().length == 0) {
+ System.err.println("No prompt files found in the prompts directory: " + promptsDirectory);
+ allChecksPassed = false;
+ }
+ }
}
// Validate models
import java.io.*;
import java.util.List;
+import static eu.svjatoslav.commons.file.IOHelper.getFileContentsAsString;
+
@Data
public class Configuration {
@JsonProperty("thread_count")
private int threadCount;
- @JsonProperty("prompts")
- private List<Prompt> prompts;
+ @JsonProperty("prompts_directory")
+ private File promptsDirectory;
private List<ConfigurationModel> models;
return mapper.readValue(configFile, Configuration.class);
}
- public String getPromptByAlias(String alias) {
- //System.out.println("Prompts: " + prompts);
-
- for (Prompt prompt : prompts) {
- if (prompt.getAlias().equals(alias)) {
- return prompt.getPrompt();
- }
- }
-
- return null;
+ public String getPromptByAlias(String alias) throws IOException {
+ File promptFile = new File(promptsDirectory, alias + ".txt");
+ return getFileContentsAsString(promptFile);
}
}