batch_thread_count: 10
thread_count: 6
models:
- - alias: "wizard"
+ - alias: "default"
filesystem_path: "WizardLM-2-8x22B.Q5_K_M-00001-of-00005.gguf"
context_size_tokens: 64000
end_of_text_marker: null
filesystem_path: "daringmaid-20b.Q4_K_M.gguf"
context_size_tokens: 4096
end_of_text_marker: null
-
+ prompts:
+ - alias: "default"
+ prompt: |
+ This conversation involves a user and AI assistant where the AI
+ is expected to provide not only immediate responses but also detailed and
+ well-reasoned analysis. The AI should consider all aspects of the query
+ and deliver insights based on logical deductions and comprehensive understanding.
+ AI assistant should reply using emacs org-mode syntax.
+ Quick recap: *this is bold* [[http://domain.org][This is link]]
+ ,* Heading level 1
+ ,** Heading level 2
+ | Col 1 Row 1 | Col 2 Row 1 |
+ | Col 1 Row 2 | Col 2 Row 2 |
+ ,#+BEGIN_SRC python
+ print ('Hello, world!')
+ ,#+END_SRC
+
+ - alias: "writer"
+ prompt: |
+ You are a best-selling book writer.
#+end_src
*** Configuration file syntax
keep CPU cores unnecessarily busy.
- Default value: 6
-- models :: List of available large language models. First model in
- the list would be used by default.
-
+- models :: List of available large language models.
- alias :: Short model alias.
- filesystem_path :: File name of the model as located within
*models_directory*
identify and remove them so that they don't leak into
conversation. Default value is: *null*.
+- prompts :: List of predefined system prompts for the AI.
+ - alias :: Short prompt alias.
+ - prompt :: Actual prompt text that is sent to the AI together with
+ the user question.
+
+
+*WARNING: MODEL SELECTION AND PROMPT SELECTION ARE CURRENTLY NOT IMPLEMENTED*
+
+While it is possible to configure many prompts and models, at the
+moment Älyverkko CLI always uses the model and the prompt that carry
+the "default" alias. This will be fixed soon.
+
+
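+For illustration, the relevant part of a configuration that matches the
+current behavior only needs one model and one prompt, both registered
+under the "default" alias (the model file name is reused from the
+example above; the prompt text is only a placeholder):
+
+#+begin_src yaml
+models:
+  - alias: "default"
+    filesystem_path: "WizardLM-2-8x22B.Q5_K_M-00001-of-00005.gguf"
+    context_size_tokens: 64000
+    end_of_text_marker: null
+prompts:
+  - alias: "default"
+    prompt: |
+      You are a helpful assistant.
+#+end_src
+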
*** Enlisting available models
Once Älyverkko CLI is installed and properly configured, you can run
the following command on the command line to see which models are available to
package eu.svjatoslav.alyverkko_cli;
+import eu.svjatoslav.alyverkko_cli.commands.MailQuery;
import eu.svjatoslav.alyverkko_cli.model.Model;
import java.io.*;
public static final String AI_RESPONSE_MARKER = "ASSISTANT:";
private static final String LLAMA_CPP_META_INFO_MARKER = "llm_load_print_meta: ";
- private final String aiQuery;
private final Model model;
private final Float temperature;
+ private final String systemPrompt;
+ private final String userPrompt;
File inputFile;
/**
* Creates a new AI task.
- *
- * @param input Problem statement to be used for the AI task.
- * @param model The model to be used for the AI task.
- * @param temperature The temperature to be used for the AI inference process.
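+ *
+ * @param mailQuery the mail query supplying the model, system prompt and user prompt for this task.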
*/
- public AiTask(String input, Model model, Float temperature) {
- this.aiQuery = buildAiQuery(input);
- this.model = model;
- this.temperature = temperature == null ? configuration.getDefaultTemperature() : temperature;
+ public AiTask(MailQuery mailQuery) {
+ this.model = mailQuery.model;
+ this.temperature = configuration.getDefaultTemperature();
+ this.systemPrompt = mailQuery.systemPrompt;
+ this.userPrompt = mailQuery.userPrompt;
}
- private String buildAiQuery(String input) {
+ private String buildAiQuery() {
StringBuilder sb = new StringBuilder();
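+ // The assembled query is plain text in three parts: the SYSTEM
+ // section (configured system prompt), the USER section (the mail
+ // contents) and a trailing ASSISTANT: marker for the model to complete.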
+ sb.append("SYSTEM:\n").append(systemPrompt).append("\n");
- sb.append("SYSTEM:\nThis conversation involves a user and AI assistant where the AI " +
- "is expected to provide not only immediate responses but also detailed and " +
- "well-reasoned analysis. The AI should consider all aspects of the query " +
- "and deliver insights based on logical deductions and comprehensive understanding." +
- "AI assistant should reply using emacs org-mode syntax.\n" +
- "Quick recap: *this is bold* [[http://domain.org][This is link]]\n" +
- "* Heading level 1\n" +
- "** Heading level 2\n" +
- "| Col 1 Row 1 | Col 2 Row 1 |\n" +
- "| Col 1 Row 2 | Col 2 Row 2 |\n" +
- "#+BEGIN_SRC python\n" +
- " print ('Hello, world!')\n" +
- "#+END_SRC\n\n");
-
-
- String filteredInput = filterParticipantsInUserInput(input);
-
- // if filtered input does not start with "USER:", add it
- if (!filteredInput.startsWith("USER:")) {
- filteredInput = "USER:\n" + filteredInput;
- }
+ String filteredUserPrompt = filterParticipantsInUserInput(userPrompt);
+ if (!filteredUserPrompt.startsWith("USER:")) sb.append("USER:\n");
+ sb.append(filteredUserPrompt).append("\n");
- sb.append(filteredInput).append("\n").append(AI_RESPONSE_MARKER);
+ sb.append(AI_RESPONSE_MARKER);
return sb.toString();
}
*/
public String runAiQuery() throws InterruptedException, IOException {
try {
- initializeInputFile();
+ initializeInputFile(buildAiQuery());
ProcessBuilder processBuilder = new ProcessBuilder();
processBuilder.command(getCliCommand().split("\\s+")); // Splitting the command string into parts
/**
* Initializes the input file for the AI task.
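+ *
+ * @param aiQuery the fully assembled query text to write into the temporary input file.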
*/
- private void initializeInputFile() throws IOException {
+ private void initializeInputFile(String aiQuery) throws IOException {
// write AI input to file
inputFile = createTemporaryFile();
Files.write(inputFile.toPath(), aiQuery.getBytes());
}
}
- public static String runAiQuery(String problemStatement, Model model, Float temperature) throws IOException, InterruptedException {
- AiTask ai = new AiTask(problemStatement, model, temperature);
- return ai.runAiQuery();
- }
}
package eu.svjatoslav.alyverkko_cli.commands;
import eu.svjatoslav.alyverkko_cli.*;
-import eu.svjatoslav.alyverkko_cli.model.Model;
import eu.svjatoslav.alyverkko_cli.model.ModelLibrary;
import eu.svjatoslav.commons.cli_helper.parameter_parser.Parser;
import eu.svjatoslav.commons.cli_helper.parameter_parser.parameter.DirectoryOption;
System.out.println("\nReplying to mail: " + file.getName());
// Read the mail contents, and remove the TOCOMPUTE: prefix from the first line
- String mailContents = getFileContentsAsString(file);
- mailContents = removeToComputePrefixFile(mailContents);
+ String inputFileContent = getFileContentsAsString(file);
+ MailQuery mailQuery = parseInputFileContent(inputFileContent);
- // faster model for testing for development time testing
- // String modelAlias = "maid";
- // TODO: make model CLI argument
- String modelAlias = "wizard";
+ AiTask aiTask = new AiTask(mailQuery);
+ String aiGeneratedResponse = aiTask.runAiQuery();
- Model model = modelLibrary.findModelByAlias(modelAlias).get();
- String aiGeneratedResponse = AiTask.runAiQuery(mailContents, model, null);
+ // Prepare result file content
+ StringBuilder resultFileContent = new StringBuilder();
+ if (!mailQuery.userPrompt.startsWith("* USER:\n")) resultFileContent.append("* USER:\n");
+ resultFileContent.append(mailQuery.userPrompt).append("\n");
+ resultFileContent.append("* ASSISTANT:\n").append(aiGeneratedResponse).append("\n");
- // Append the AI response to the mail contents
- if (!mailContents.startsWith("* USER:\n")) {
- mailContents = "* USER:\n" + mailContents;
- }
-
- String newMailContents = mailContents + "\n* ASSISTANT:\n" + aiGeneratedResponse;
-
- // Write the result to the file
- saveToFile(file, newMailContents);
+ // Write result content to the file
+ saveToFile(file, resultFileContent.toString());
}
- private String removeToComputePrefixFile(String mailContents) {
- // Remove the first line from the mail contents
- int firstNewLineIndex = mailContents.indexOf('\n');
- if (firstNewLineIndex != -1) {
- mailContents = mailContents.substring(firstNewLineIndex + 1);
+ private MailQuery parseInputFileContent(String inputFileContent) {
+ MailQuery mailQuery = new MailQuery();
+
+ // extract the user prompt (everything that follows the first line)
+ int firstNewLineIndex = inputFileContent.indexOf('\n');
+ if (firstNewLineIndex == -1) {
+ throw new IllegalArgumentException("Input file is only one line long. This is the content: " + inputFileContent);
+ } else {
+ mailQuery.userPrompt = inputFileContent.substring(firstNewLineIndex + 1);
}
- return mailContents;
+
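+ // The first line (the TOCOMPUTE: marker line) was split off above.
+ // Choosing a model and prompt based on it is not implemented yet,
+ // so the "default" aliases are used below.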
+ String firstLine = inputFileContent.substring(0, firstNewLineIndex);
+ //System.out.println("First line is: \"" + firstLine + "\"");
+
+ mailQuery.systemPrompt = configuration.getPromptByAlias("default");
+ mailQuery.model = modelLibrary.findModelByAlias("default").get();
+ return mailQuery;
}
--- /dev/null
+package eu.svjatoslav.alyverkko_cli.commands;
+
+import eu.svjatoslav.alyverkko_cli.model.Model;
+
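+/**
+ * Carries everything needed to answer one mail: the model to run,
+ * the system prompt to use and the user's own prompt text.
+ */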
+public class MailQuery {
+ public String systemPrompt;
+ public String userPrompt;
+ public Model model;
+}
import java.io.*;
import java.util.List;
+import java.util.Map;
public class Configuration {
@JsonProperty("thread_count")
private int threadCount;
+ @JsonProperty("prompts")
+ private List<Prompt> prompts;
+
+ private List<ConfigurationModel> models;
+
public List<ConfigurationModel> getModels() {
return models;
}
this.models = models;
}
- private List<ConfigurationModel> models;
-
public Configuration() {
}
return mapper.readValue(configFile, Configuration.class);
}
+ public List<Prompt> getPrompts() {
+ return prompts;
+ }
+
+ public void setPrompts(List<Prompt> prompts) {
+ this.prompts = prompts;
+ }
public File getMailDirectory() {
return mailDirectory;
public void setThreadCount(int threadCount) {
this.threadCount = threadCount;
}
+
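+ /**
+ * Looks up a configured prompt by its alias.
+ *
+ * @param alias prompt alias to search for.
+ * @return the prompt text, or null if no prompt with the given alias is configured.
+ */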
+ public String getPromptByAlias(String alias) {
+ //System.out.println("Prompts: " + prompts);
+
+ for (Prompt prompt : prompts) {
+ if (prompt.getAlias().equals(alias)) {
+ return prompt.getPrompt();
+ }
+ }
+
+ return null;
+ }
}
--- /dev/null
+package eu.svjatoslav.alyverkko_cli.configuration;
+
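+/**
+ * A named system prompt, as declared under "prompts" in the
+ * configuration file and referenced by its alias.
+ */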
+public class Prompt {
+
+ private String alias;
+ private String prompt;
+
+ public String getAlias() {
+ return alias;
+ }
+
+ public void setAlias(String alias) {
+ this.alias = alias;
+ }
+
+ public String getPrompt() {
+ return prompt;
+ }
+
+ public void setPrompt(String prompt) {
+ this.prompt = prompt;
+ }
+
+ @Override
+ public String toString() {
+ return "Prompt{" +
+ "alias='" + alias + '\'' +
+ ", prompt='" + prompt + '\'' +
+ '}';
+ }
+}