import static eu.svjatoslav.alyverkko_cli.Main.configuration;
import static java.lang.String.join;
+/**
+ * Encapsulates the process of running an AI inference query via
+ * llama.cpp. It prepares an input file, spawns the process, collects
+ * output, and cleans up temporary files.
+ */
public class AiTask {
+
+ /**
+ * Marker for the AI's response block, used in the constructed prompt string.
+ */
public static final String AI_RESPONSE_MARKER = "ASSISTANT:";
+
+ /**
+ * Marker used by llama.cpp to print metadata. We monitor and display these lines.
+ */
private static final String LLAMA_CPP_META_INFO_MARKER = "llm_load_print_meta: ";
- MailQuery mailQuery;
+ /**
+ * The mail query defining system prompt, user prompt, and which model to use.
+ */
+ private final MailQuery mailQuery;
+
+ /**
+ * The temperature (creativity factor) for the AI.
+ */
private final Float temperature;
- File inputFile;
/**
- * Creates a new AI task.
+ * Temporary file used as input to the llama.cpp CLI.
+ */
+ private File inputFile;
+
+ /**
+ * Creates a new AI task with a given mail query.
+ *
+ * @param mailQuery the mail query containing model and prompts.
*/
public AiTask(MailQuery mailQuery) {
this.mailQuery = mailQuery;
this.temperature = configuration.getDefaultTemperature();
}
+ /**
+ * Builds the prompt text that is fed to llama.cpp, including the system prompt,
+ * the user prompt, and an "ASSISTANT:" marker signifying where the AI response begins.
+ *
+ * @return a string containing the fully prepared query prompt.
+ */
private String buildAiQuery() {
StringBuilder sb = new StringBuilder();
sb.append("SYSTEM:\n").append(mailQuery.systemPrompt).append("\n");
String filteredUserPrompt = filterParticipantsInUserInput(mailQuery.userPrompt);
- if (!filteredUserPrompt.startsWith("USER:")) sb.append("USER:\n");
+ if (!filteredUserPrompt.startsWith("USER:")) {
+ sb.append("USER:\n");
+ }
sb.append(filteredUserPrompt).append("\n");
sb.append(AI_RESPONSE_MARKER);
return sb.toString();
}
+ /**
+ * In the user input, rewrite lines like "* USER:" or "* ASSISTANT:"
+ * to "USER:" or "ASSISTANT:" so that we standardize them in the final prompt.
+ *
+ * @param input the raw user input.
+ * @return a sanitized or standardized version of the user prompt.
+ */
public static String filterParticipantsInUserInput(String input) {
StringBuilder result = new StringBuilder();
String[] lines = input.split("\n");
for (int i = 0; i < lines.length; i++) {
String line = lines[i];
- if (i > 0) result.append("\n");
- if ("* ASSISTANT:".equals(line)) line = "ASSISTANT:";
- if ("* USER:".equals(line)) line = "USER:";
+ if (i > 0) {
+ result.append("\n");
+ }
+ if ("* ASSISTANT:".equals(line)) {
+ line = "ASSISTANT:";
+ }
+ if ("* USER:".equals(line)) {
+ line = "USER:";
+ }
result.append(line);
}
return result.toString();
}
+ /**
+ * In the AI's response, revert lines like "ASSISTANT:" to "* ASSISTANT:"
+ * for easier reading in org-mode, plus append a * USER: prompt at the end
+ * to form the basis for a continuing conversation.
+ *
+ * @param response the raw AI response.
+ * @return a sanitized response for org-mode usage.
+ */
public static String filterParticipantsInAiResponse(String response) {
StringBuilder result = new StringBuilder();
String[] lines = response.split("\n");
for (int i = 0; i < lines.length; i++) {
String line = lines[i];
- if (i > 0) result.append("\n");
- if ("ASSISTANT:".equals(line)) line = "* ASSISTANT:";
- if ("USER:".equals(line)) line = "* USER:";
+ if (i > 0) {
+ result.append("\n");
+ }
+ if ("ASSISTANT:".equals(line)) {
+ line = "* ASSISTANT:";
+ }
+ if ("USER:".equals(line)) {
+ line = "* USER:";
+ }
result.append(line);
}
result.append("\n* USER:\n");
}
/**
- * Compute the AI task.
- * @return The result of the AI task.
+ * Runs the AI query by constructing the prompt, writing it to a temp file,
+ * invoking llama.cpp, collecting output, and performing any final cleanup.
+ *
+ * @return the AI's response in a format suitable for appending back into
+ * the conversation file.
+ * @throws InterruptedException if the process is interrupted.
+ * @throws IOException if reading/writing the file fails or the process fails to start.
*/
public String runAiQuery() throws InterruptedException, IOException {
try {
+ // Build input prompt
initializeInputFile(buildAiQuery());
+ // Prepare process builder
ProcessBuilder processBuilder = new ProcessBuilder();
- processBuilder.command(getCliCommand().split("\\s+")); // Splitting the command string into parts
+ processBuilder.command(getCliCommand().split("\\s+")); // Splitting the command string into tokens
+ // Start process
Process process = processBuilder.start();
+
+ // Handle process's error stream
handleErrorThread(process);
+
+ // Handle process's output stream
StringBuilder result = new StringBuilder();
Thread outputThread = handleResultThread(process, result);
- process.waitFor(); // Wait for the main AI computing process to finish
- outputThread.join(); // Wait for the output thread to finish
+
+ // Wait for the process to finish
+ process.waitFor();
+
+ // Wait for the output thread to finish reading
+ outputThread.join();
+
+ // Clean up the AI response: remove partial prompt text, end-of-text marker, etc.
return filterParticipantsInAiResponse(cleanupAiResponse(result.toString()));
} finally {
deleteTemporaryFile();
}
/**
- * Initializes the input file for the AI task.
+ * Creates a temporary file for the AI input and writes the prompt to it.
+ *
+ * @param aiQuery the final prompt string for the AI to process.
+ * @throws IOException if file creation or writing fails.
*/
- private void initializeInputFile(String aiQuery ) throws IOException {
- // write AI input to file
+ private void initializeInputFile(String aiQuery) throws IOException {
inputFile = createTemporaryFile();
Files.write(inputFile.toPath(), aiQuery.getBytes());
}
/**
- * Creates and starts a thread to handle the error stream of an AI inference process.
+ * Creates a temporary file that will be used for the AI prompt input.
*
- * @param process the process to read the error stream from.
+ * @return a new {@link File} referencing the created temporary file.
+ * @throws IOException if the file could not be created.
*/
- private static void handleErrorThread(Process process) {
- Thread errorThread = new Thread(() -> {
- try (BufferedReader reader = new BufferedReader(new InputStreamReader(process.getErrorStream()))) {
- String line;
- while ((line = reader.readLine()) != null) handleErrorStreamLine(line);
- } catch (IOException e) {
- System.err.println("Error reading error stream: " + e.getMessage());
- }
- });
- errorThread.start();
+ private File createTemporaryFile() throws IOException {
+ File file = Files.createTempFile("ai-inference", ".tmp").toFile();
+ file.deleteOnExit();
+ return file;
}
-
/**
- * Handles a single line from the error stream of an AI inference process.
- * If the line contains meta-info, it is printed to the console.
+ * Cleans up the AI response by removing the partial text before the
+ * AI response marker and after the end-of-text marker, if specified.
*
- * @param line the line to be handled.
+ * @param result the raw output from llama.cpp.
+ * @return the cleaned AI response.
*/
- private static void handleErrorStreamLine(String line) {
- if (line.startsWith(LLAMA_CPP_META_INFO_MARKER)) {
- // Print the meta-info to console
- System.out.println(line.substring(LLAMA_CPP_META_INFO_MARKER.length()));
- return;
+ private String cleanupAiResponse(String result) {
+ // remove text before AI response marker
+ int aIResponseIndex = result.lastIndexOf(AI_RESPONSE_MARKER);
+ if (aIResponseIndex != -1) {
+ result = result.substring(aIResponseIndex + AI_RESPONSE_MARKER.length());
+ }
+
+ // remove text after end of text marker, if it exists
+ if (mailQuery.model.endOfTextMarker != null) {
+ int endOfTextMarkerIndex = result.indexOf(mailQuery.model.endOfTextMarker);
+ if (endOfTextMarkerIndex != -1) {
+ result = result.substring(0, endOfTextMarkerIndex);
+ }
}
- // Print the error to console
- Utils.printRedMessageToConsole(line);
+ return result + "\n";
}
/**
- * Gets the full command to be executed by the AI inference process.
+ * Returns the full command string used to run the AI inference via llama.cpp.
*
- * @return the full command to be executed by the AI inference process.
+ * @return a string representing the command and all arguments.
*/
private String getCliCommand() {
-
- int niceValue = 10; // Set the desired niceness level (10 is a common value for background tasks)
-
+ int niceValue = 10; // niceness level for background tasks
String executablePath = configuration.getLlamaCliPath().getAbsolutePath();
return join(" ",
"--no-conversation",
"-n -1",
"--repeat_penalty 1.1",
- "--file " + inputFile);
+ "--file " + inputFile
+ );
+ }
+ /**
+ * Spawns a new Thread to handle the error stream from llama.cpp,
+ * printing lines that contain metadata or errors to the console.
+ *
+ * @param process the process whose error stream is consumed.
+ */
+ private static void handleErrorThread(Process process) {
+ Thread errorThread = new Thread(() -> {
+ try (BufferedReader reader = new BufferedReader(new InputStreamReader(process.getErrorStream()))) {
+ String line;
+ while ((line = reader.readLine()) != null) {
+ handleErrorStreamLine(line);
+ }
+ } catch (IOException e) {
+ System.err.println("Error reading error stream: " + e.getMessage());
+ }
+ });
+ errorThread.start();
}
+ /**
+ * Decides what to do with each line from the error stream:
+ * if it matches the llama.cpp meta-info marker, print it normally;
+ * otherwise print as an error.
+ *
+ * @param line a line from the llama.cpp error stream.
+ */
+ private static void handleErrorStreamLine(String line) {
+ if (line.startsWith(LLAMA_CPP_META_INFO_MARKER)) {
+ // Print the meta-info to the console in normal color
+ System.out.println(line.substring(LLAMA_CPP_META_INFO_MARKER.length()));
+ } else {
+ // Print actual error lines in red
+ Utils.printRedMessageToConsole(line);
+ }
+ }
/**
- * Creates and starts a thread to handle the result of the AI inference process.
- * The result is read from the process's input stream and saved in a StringBuilder.
+ * Consumes the standard output (inference result) from the
+ * llama.cpp process, storing it into a result buffer for further
+ * cleanup, while simultaneously printing it to the console.
*
- * @param process the process to read the result from.
- * @param result the StringBuilder to save the result in.
- * @return the thread that handles the result.
+ * @param process the AI inference process.
+ * @param result a string builder to accumulate the final result.
+ * @return the thread that is reading the output stream.
*/
private static Thread handleResultThread(Process process, StringBuilder result) {
Thread outputThread = new Thread(() -> {
try (BufferedReader reader = new BufferedReader(new InputStreamReader(process.getInputStream()))) {
String aiResultLine;
while ((aiResultLine = reader.readLine()) != null) {
- System.out.print("AI: " + aiResultLine + "\n"); // Display each line as it's being read
- result.append(aiResultLine).append("\n"); // Save the result
+ System.out.print("AI: " + aiResultLine + "\n"); // Show each line in real-time
+ result.append(aiResultLine).append("\n");
}
} catch (IOException e) {
throw new RuntimeException(e);
}
/**
- * Returns the temporary file for the AI to work with.
+ * Deletes the temporary input file once processing is complete.
*/
- private File createTemporaryFile() throws IOException {
- File file = Files.createTempFile("ai-inference", ".tmp").toFile();
- file.deleteOnExit();
- return file;
- }
-
- /**
- * Cleans up the AI response by removing unnecessary text.
- * @param result the AI response string to be cleaned up.
- * @return the cleaned-up AI response.k
- */
- private String cleanupAiResponse(String result) {
-
- // remove text before AI response marker
- int aIResponseIndex = result.lastIndexOf(AI_RESPONSE_MARKER);
- if (aIResponseIndex != -1) {
- result = result.substring(aIResponseIndex + AI_RESPONSE_MARKER.length());
- }
-
- // remove text after end of text marker, if it exists
- if (mailQuery.model.endOfTextMarker != null) {
- int endOfTextMarkerIndex = result.indexOf(mailQuery.model.endOfTextMarker);
- if (endOfTextMarkerIndex != -1) {
- result = result.substring(0, endOfTextMarkerIndex);
- }
- }
-
- return result + "\n";
- }
-
private void deleteTemporaryFile() {
if (inputFile != null && inputFile.exists()) {
try {
}
}
}
-
-
}
import eu.svjatoslav.alyverkko_cli.Command;
import eu.svjatoslav.alyverkko_cli.model.ModelLibrary;
-import eu.svjatoslav.commons.cli_helper.parameter_parser.Parser;
-import eu.svjatoslav.commons.cli_helper.parameter_parser.parameter.*;
import java.io.IOException;
import static eu.svjatoslav.alyverkko_cli.Main.configuration;
import static eu.svjatoslav.alyverkko_cli.configuration.Configuration.loadConfiguration;
+/**
+ * Lists all configured models in the system, loading them from the
+ * user’s configuration and printing them to the console.
+ */
public class ListModelsCommand implements Command {
+ /**
+ * @return the name of this command, i.e., "listmodels".
+ */
@Override
public String getName() {
return "listmodels";
}
+ /**
+ * Executes the command to load the user's configuration and list
+ * all known AI models, printing them to stdout.
+ *
+ * @param cliArguments the command-line arguments after "listmodels".
+ * @throws IOException if loading configuration fails.
+ */
@Override
public void execute(String[] cliArguments) throws IOException {
configuration = loadConfiguration();
System.out.println("Listing models in directory: " + configuration.getModelsDirectory());
ModelLibrary modelLibrary = new ModelLibrary(configuration.getModelsDirectory(), configuration.getModels());
modelLibrary.printModels();
-
}
}
import java.util.Map;
import java.util.Optional;
-import static eu.svjatoslav.alyverkko_cli.configuration.Configuration.loadConfiguration;
import static eu.svjatoslav.alyverkko_cli.Main.configuration;
+import static eu.svjatoslav.alyverkko_cli.configuration.Configuration.loadConfiguration;
import static eu.svjatoslav.commons.file.IOHelper.getFileContentsAsString;
import static eu.svjatoslav.commons.file.IOHelper.saveToFile;
import static java.nio.file.StandardWatchEventKinds.ENTRY_CREATE;
import static java.nio.file.StandardWatchEventKinds.ENTRY_MODIFY;
+/**
+ * The MailCorrespondentCommand continuously monitors a specified mail
+ * directory for new or modified text files, checks if they have a
+ * "TOCOMPUTE:" marker, and if so, processes them with an AI model.
+ * Once processed, results are appended to the same file.
+ *
+ * Usage:
+ * <pre>
+ * alyverkko-cli mail
+ * </pre>
+ */
public class MailCorrespondentCommand implements Command {
+ /**
+ * A command-line parser to handle "mail" command arguments.
+ */
final Parser parser = new Parser();
+ /**
+ * Optional CLI argument for specifying a configuration file path.
+ */
+ public FileOption configFileOption = parser.add(new FileOption("Configuration file path"))
+ .addAliases("--config", "-c")
+ .mustExist();
+
+ /**
+ * The library of available models, constructed from configuration.
+ */
ModelLibrary modelLibrary;
- private WatchService watcher;
+ /**
+ * The WatchService instance for monitoring file system changes in
+ * the mail directory.
+ */
+ private WatchService directoryWatcher;
- File mailDirectory;
+ /**
+ * The directory that we continuously watch for new tasks.
+ */
+ File mailDir;
/**
- * Configuration file location.
+ * @return the name of this command, i.e., "mail".
*/
- public FileOption configFileOption = parser.add(new FileOption("Configuration file path"))
- .addAliases("--config", "-c").mustExist();
+ @Override
+ public String getName() {
+ return "mail";
+ }
+
+ /**
+ * Executes the "mail" command, loading configuration, starting a
+ * WatchService on the mail directory, and running an infinite loop
+ * that processes newly discovered tasks.
+ *
+ * @param cliArguments the command-line arguments following the "mail" subcommand.
+ * @throws IOException if reading/writing tasks fails.
+ * @throws InterruptedException if the WatchService is interrupted.
+ */
+ @Override
+ public void execute(String[] cliArguments) throws IOException, InterruptedException {
+ if (!parser.parse(cliArguments)) {
+ System.out.println("Failed to parse commandline arguments");
+ parser.showHelp();
+ return;
+ }
+
+ configuration = loadConfiguration(configFileOption.isPresent() ? configFileOption.getValue() : null);
+ if (configuration == null) {
+ System.out.println("Failed to load configuration file");
+ return;
+ }
+
+ modelLibrary = new ModelLibrary(configuration.getModelsDirectory(), configuration.getModels());
+ mailDir = configuration.getMailDirectory();
+ // Set up directory watch service
+ initializeFileWatcher();
+
+ // Process any existing files that might already be in the directory
+ initialMailScanAndReply();
+
+ System.out.println("Mail correspondent running. Press CTRL+c to terminate.");
+
+ // Main loop: watch for file events
+ while (true) {
+ WatchKey key;
+ try {
+ key = directoryWatcher.take();
+ } catch (InterruptedException e) {
+ System.out.println("Interrupted while waiting for file system events. Exiting.");
+ break;
+ }
+
+ System.out.println("Detected filesystem event.");
+
+ // Sleep briefly to allow the file to be fully written
+ Thread.sleep(1000);
+
+ processDetectedFilesystemEvents(key);
+
+ if (!key.reset()) {
+ break;
+ }
+ }
+
+ directoryWatcher.close();
+ }
+
+ /**
+ * Performs an initial scan of existing files in the mail directory,
+ * processing those that need AI inference (i.e., that start with "TOCOMPUTE:").
+ *
+ * @throws IOException if reading files fails.
+ * @throws InterruptedException if the thread is interrupted.
+ */
private void initialMailScanAndReply() throws IOException, InterruptedException {
- File[] files = mailDirectory.listFiles();
+ File[] files = mailDir.listFiles();
if (files == null) return;
- for (File file : files)
+ for (File file : files) {
processMailIfNeeded(file);
+ }
}
+ /**
+ * Checks if a file needs to be processed by verifying that it:
+ * 1) is not hidden,
+ * 2) is a regular file,
+ * 3) starts with "TOCOMPUTE:" in the first line.
+ *
+ * @param file the file to inspect.
+ * @return true if the file meets the criteria for AI processing.
+ * @throws IOException if reading the file fails.
+ */
private boolean isMailProcessingNeeded(File file) throws IOException {
// ignore hidden files
- if (file.getName().startsWith("."))
+ if (file.getName().startsWith(".")) {
return false;
+ }
- // Check if the file is a mail file (not a directory
- if (!file.isFile()) return false;
+ // Check if it's a regular file
+ if (!file.isFile()) {
+ return false;
+ }
+ // Ensure the first line says "TOCOMPUTE:"
return fileHasToComputeMarker(file);
}
+ /**
+ * Inspects the first line of the file to see if it starts with "TOCOMPUTE:".
+ *
+ * @param file the file to read.
+ * @return true if the file's first line starts with "TOCOMPUTE:".
+ * @throws IOException if file reading fails.
+ */
private static boolean fileHasToComputeMarker(File file) throws IOException {
try (BufferedReader reader = new BufferedReader(new FileReader(file))) {
String firstLine = reader.readLine();
}
}
+ /**
+ * Processes a file if it has the "TOCOMPUTE:" marker, running an AI
+ * query and appending the result to the file. Otherwise logs that
+ * it's being ignored.
+ *
+ * @param file the file to possibly process.
+ * @throws IOException if reading/writing the file fails.
+ * @throws InterruptedException if the AI query is interrupted.
+ */
private void processMailIfNeeded(File file) throws IOException, InterruptedException {
if (!isMailProcessingNeeded(file)) {
System.out.println("Ignoring file: " + file.getName() + " (does not need processing for now)");
System.out.println("\nReplying to mail: " + file.getName());
- // Read the mail contents, and remove the TOCOMPUTE: prefix from the first line
+ // Read the mail content
String inputFileContent = getFileContentsAsString(file);
+
+ // Parse the relevant data into a MailQuery object
MailQuery mailQuery = parseInputFileContent(inputFileContent);
+ // Create an AiTask and run the query
AiTask aiTask = new AiTask(mailQuery);
String aiGeneratedResponse = aiTask.runAiQuery();
- // Prepare result file content
+ // Build new content
StringBuilder resultFileContent = new StringBuilder();
- if (!mailQuery.userPrompt.startsWith("* USER:\n")) resultFileContent.append("* USER:\n");
+
+ // Ensure the user prompt block is labeled if it isn't already
+ if (!mailQuery.userPrompt.startsWith("* USER:\n")) {
+ resultFileContent.append("* USER:\n");
+ }
resultFileContent.append(mailQuery.userPrompt).append("\n");
- resultFileContent.append("* ASSISTANT:\n").append(aiGeneratedResponse).append("\n");
- // Write result content to the file
+ // Append the AI response block
+ resultFileContent
+ .append("* ASSISTANT:\n")
+ .append(aiGeneratedResponse)
+ .append("\n");
+
+ // Write the combined result back to the same file
saveToFile(file, resultFileContent.toString());
}
+ /**
+ * Converts the raw file content (including the line beginning with "TOCOMPUTE:")
+ * into a {@link MailQuery} object that the AI can process.
+ *
+ * @param inputFileContent the raw contents of the mail file.
+ * @return a {@link MailQuery} containing the system prompt, user prompt, and the selected model.
+ * @throws IOException if reading prompt files fails.
+ */
private MailQuery parseInputFileContent(String inputFileContent) throws IOException {
MailQuery mailQuery = new MailQuery();
- // deduct user prompt
+ // Find the newline that separates "TOCOMPUTE: ..." from the rest
int firstNewLineIndex = inputFileContent.indexOf('\n');
if (firstNewLineIndex == -1) {
- throw new IllegalArgumentException("Input file is only one line long. This is the content: " + inputFileContent);
+ throw new IllegalArgumentException("Input file is only one line long. Content: " + inputFileContent);
} else {
+ // The user prompt is everything after the first line
mailQuery.userPrompt = inputFileContent.substring(firstNewLineIndex + 1);
}
- // Parse TOCOMPUTE line for inference settings
+ // The first line will look like "TOCOMPUTE: model=... prompt=... etc."
String firstLine = inputFileContent.substring(0, firstNewLineIndex);
+
+ // Parse out the key/value pairs
Map<String, String> settings = parseSettings(firstLine);
- mailQuery.systemPrompt = configuration.getPromptByAlias(settings.getOrDefault("prompt", "default"));
+ // Look up system prompt from the "prompt" alias
+ String promptAlias = settings.getOrDefault("prompt", "default");
+ mailQuery.systemPrompt = configuration.getPromptByAlias(promptAlias);
- { // resolve model
- String modelAlias = settings.getOrDefault("model", "default");
- Optional<Model> modelOptional = modelLibrary.findModelByAlias(modelAlias);
- if (!modelOptional.isPresent())
- throw new IllegalArgumentException("Model with alias '" + modelAlias + "' not found.");
- mailQuery.model = modelOptional.get();
+ // Resolve model from the "model" alias
+ String modelAlias = settings.getOrDefault("model", "default");
+ Optional<Model> modelOptional = modelLibrary.findModelByAlias(modelAlias);
+ if (!modelOptional.isPresent()) {
+ throw new IllegalArgumentException("Model with alias '" + modelAlias + "' not found.");
}
+ mailQuery.model = modelOptional.get();
return mailQuery;
}
-
- @Override
- public String getName() {
- return "mail";
- }
-
- @Override
- public void execute(String[] cliArguments) throws IOException, InterruptedException {
- if (!parser.parse(cliArguments)) {
- System.out.println("Failed to parse commandline arguments");
- parser.showHelp();
- return;
- }
-
- configuration = loadConfiguration(configFileOption.isPresent() ? configFileOption.getValue() : null);
- if (configuration == null){
- System.out.println("Failed to load configuration file");
- return;
- }
-
- modelLibrary = new ModelLibrary(configuration.getModelsDirectory(), configuration.getModels());
- mailDirectory = configuration.getMailDirectory();
-
- initializeFileWatcher();
-
- // before we start processing incremental changes in directory, we need to process all the existing files
- initialMailScanAndReply();
-
- System.out.println("Mail correspondent running. Press CTRL+c to terminate.");
-
- while (true) {
- WatchKey key;
- try {
- key = watcher.take();
- } catch (InterruptedException e) {
- System.out.println("Interrupted while waiting for file system events. Exiting.");
- break;
- }
-
- System.out.println("Detected filesystem event.");
-
- // sleep for a while to allow the file to be fully written
- Thread.sleep(1000);
-
- processDetectedFilesystemEvents(key);
-
- if (!key.reset()) break;
- }
-
- watcher.close();
- }
-
+ /**
+ * Parses the "TOCOMPUTE:" line, which should look like:
+ * <pre>TOCOMPUTE: key1=value1 key2=value2 ...</pre>
+ *
+ * @param toComputeLine the line beginning with "TOCOMPUTE:".
+ * @return a map of settings derived from that line.
+ */
private Map<String, String> parseSettings(String toComputeLine) {
-
if (!toComputeLine.startsWith("TOCOMPUTE:")) {
throw new IllegalArgumentException("Invalid TOCOMPUTE line: " + toComputeLine);
}
+ // If there's nothing beyond "TOCOMPUTE:", just return an empty map
if (toComputeLine.length() <= "TOCOMPUTE: ".length()) {
return new HashMap<>();
}
- // Assuming the format is "TOCOMPUTE: key1=value1 key2=value2 ..."
+ // Example format: "TOCOMPUTE: prompt=writer model=mistral"
String[] parts = toComputeLine.substring("TOCOMPUTE: ".length()).split("\\s+");
-
Map<String, String> settings = new HashMap<>();
+
for (String part : parts) {
String[] keyValue = part.split("=");
- if (keyValue.length == 2) settings.put(keyValue[0], keyValue[1]);
+ if (keyValue.length == 2) {
+ settings.put(keyValue[0], keyValue[1]);
+ }
}
return settings;
}
+ /**
+ * Handles the filesystem events from the WatchService (e.g. file creation
+ * or modification), then processes those files if necessary.
+ *
+ * @param key the watch key containing the events.
+ * @throws IOException if file reading/writing fails.
+ * @throws InterruptedException if the AI process is interrupted.
+ */
private void processDetectedFilesystemEvents(WatchKey key) throws IOException, InterruptedException {
for (WatchEvent<?> event : key.pollEvents()) {
WatchEvent.Kind<?> kind = event.kind();
// Skip OVERFLOW event
- if (kind == StandardWatchEventKinds.OVERFLOW) continue;
+ if (kind == StandardWatchEventKinds.OVERFLOW) {
+ continue;
+ }
- // Retrieve the file name associated with the event
+ // The filename for the event
Path filename = ((WatchEvent<Path>) event).context();
System.out.println("Event: " + kind + " for file: " + filename);
- // Process the event
- processFileSystemEvent(kind, filename);
+ // Process the file
+ if (kind == ENTRY_CREATE || kind == ENTRY_MODIFY) {
+ File file = mailDir.toPath().resolve(filename).toFile();
+ processMailIfNeeded(file);
+ }
}
}
+ /**
+ * Registers the mail directory with a WatchService for ENTRY_CREATE
+ * and ENTRY_MODIFY events.
+ *
+ * @throws IOException if registration fails.
+ */
private void initializeFileWatcher() throws IOException {
- this.watcher = FileSystems.getDefault().newWatchService();
- Paths.get(mailDirectory.getAbsolutePath()).register(watcher, ENTRY_CREATE, ENTRY_MODIFY);
- }
-
- private void processFileSystemEvent(WatchEvent.Kind<?> kind, Path filename) throws IOException, InterruptedException {
- if (kind != ENTRY_CREATE && kind != ENTRY_MODIFY) return;
-
- File file = mailDirectory.toPath().resolve(filename).toFile();
- processMailIfNeeded(file);
+ this.directoryWatcher = FileSystems.getDefault().newWatchService();
+ Paths.get(mailDir.getAbsolutePath()).register(directoryWatcher, ENTRY_CREATE, ENTRY_MODIFY);
}
-
-
}
import java.util.List;
import java.util.Optional;
+/**
+ * A container (library) for multiple AI models, providing
+ * functionality for adding and retrieving models by alias.
+ */
public class ModelLibrary {
/**
- * List of all models available in the library.
+ * The list of all successfully loaded models in this library.
*/
private final List<Model> models;
+ /**
+ * The default model for this library (e.g., the first successfully
+ * loaded model in the list).
+ */
private static Model defaultModel;
+
+ /**
+ * Base directory containing the model files.
+ */
private final File modelsBaseDirectory;
/**
- * Represents a library of AI models.
+ * Constructs a library of AI models from the provided list of
+ * {@link ConfigurationModel}s, ignoring those whose paths do not exist.
*
- * @param modelsBaseDirectory the directory containing the models
+ * @param modelsBaseDirectory the root directory where model files are stored.
+ * @param configModels a list of model configurations.
*/
public ModelLibrary(File modelsBaseDirectory, List<ConfigurationModel> configModels) {
this.modelsBaseDirectory = modelsBaseDirectory;
- models = new ArrayList<>();
+ this.models = new ArrayList<>();
+ for (ConfigurationModel configModel : configModels) {
+ addModelFromConfig(configModel);
+ }
- for (ConfigurationModel configModel : configModels) addModelFromConfig(configModel);
-
- if (models.isEmpty())
+ if (models.isEmpty()) {
throw new RuntimeException("No models are defined!");
+ }
defaultModel = models.get(0);
}
+ /**
+ * Attempts to construct a {@link Model} from the given
+ * {@link ConfigurationModel}, verifying that the file actually exists.
+ *
+ * @param configModel the configuration describing the model.
+ */
private void addModelFromConfig(ConfigurationModel configModel) {
-
- // validate that model actually exists in the filesystem
File modelFile = new File(modelsBaseDirectory, configModel.getFilesystemPath());
- if (!modelFile.exists()){
+ if (!modelFile.exists()) {
Utils.printRedMessageToConsole("WARN: Model file not found: " + modelFile.getAbsolutePath() + " . Skipping model.");
return;
}
));
}
-
/**
- * Adds a given model to the existing models list if it does not already exist.
+ * Adds a model to the library if no model with the same alias
+ * already exists.
*
- * @param model the model to be added
- * @throws RuntimeException if a model with the same alias already exists in the models list
+ * @param model the model to add.
*/
- public void addModel(Model model){
- if (findModelByAlias(model.alias).isPresent())
+ public void addModel(Model model) {
+ if (findModelByAlias(model.alias).isPresent()) {
throw new RuntimeException("Model with alias \"" + model.alias + "\" already exists!");
-
+ }
models.add(model);
}
    /**
     * Returns the models loaded into this library.
     * NOTE(review): this exposes the internal mutable list directly,
     * not a defensive copy — callers can modify library state.
     *
     * @return the list of loaded models in this library.
     */
    public List<Model> getModels() {
        return models;
    }
/**
- * Finds a model by its alias.
+ * Finds a model by its alias in this library.
*
- * @param alias the alias of the model to be found
- * @return an {@link Optional} containing the model if it was found, or an empty {@link Optional} otherwise
+ * @param alias the model alias to look for.
+ * @return an {@link Optional} describing the found model, or empty if none match.
*/
- public Optional<Model> findModelByAlias(String alias){
+ public Optional<Model> findModelByAlias(String alias) {
return models.stream()
.filter(model -> model.alias.equals(alias))
.findFirst();
}
    /**
     * Returns the default model, i.e. the first model that was
     * successfully loaded when the library was constructed.
     *
     * @return the default model (first loaded model).
     */
    public Model getDefaultModel() {
        return defaultModel;
    }
/**
- * Prints the details of each model in the list.
+ * Prints the details of each model in the library to standard output.
*/
- public void printModels(){
+ public void printModels() {
System.out.println("Available models:\n");
for (Model model : models) {
model.printModelDetails();