import com.fasterxml.jackson.annotation.JsonProperty;
import lombok.Data;
import java.io.*;
import java.util.List;
import static eu.svjatoslav.commons.file.IOHelper.getFileContentsAsString;
+/**
+ * Encapsulates all user configuration for the Älyverkko CLI application,
+ * such as model directories, mail directory, default temperature,
+ * llama-cli path, etc.
+ */
@Data
public class Configuration {
- public static final String DEFAULT_CONFIG_FILE_PATH = "~/.config/alyverkko-cli/alyverkko-cli.yaml".replaceFirst("^~", System.getProperty("user.home"));
+ /**
+ * The default path for the YAML config file, typically under the user's home directory.
+ */
+ public static final String DEFAULT_CONFIG_FILE_PATH = "~/.config/alyverkko-cli/alyverkko-cli.yaml"
+ .replaceFirst("^~", System.getProperty("user.home"));
+ /**
+ * Directory where AI tasks (mail) are placed and discovered.
+ */
@JsonProperty("mail_directory")
private File mailDirectory;
+ /**
+ * Directory that contains AI model files in GGUF format.
+ */
@JsonProperty("models_directory")
private File modelsDirectory;
+ /**
+ * The default "temperature" used by the AI for creative/deterministic
+ * tradeoff. Ranges roughly between 0 and 3.
+ */
@JsonProperty("default_temperature")
private float defaultTemperature;
+ /**
+ * The filesystem path to the llama-cli executable, which processes
+ * AI tasks via llama.cpp.
+ */
@JsonProperty("llama_cli_path")
private File llamaCliPath;
+ /**
+ * Number of CPU threads used for input prompt processing.
+ */
@JsonProperty("batch_thread_count")
private int batchThreadCount;
+ /**
+ * Number of CPU threads used for AI inference.
+ */
@JsonProperty("thread_count")
private int threadCount;
+ /**
+ * Directory containing text prompt files. Each file is a separate
+ * "prompt" by alias (the filename minus ".txt").
+ */
@JsonProperty("prompts_directory")
private File promptsDirectory;
+ /**
+ * The list of models defined in this configuration.
+ */
private List<ConfigurationModel> models;
- public void setModels(List<ConfigurationModel> models) {
- this.models = models;
- }
-
- public Configuration() {
- }
-
+ /**
+ * Loads the configuration from the default file path.
+ *
+ * @return the {@link Configuration} object, or null if the file doesn't exist or fails parsing.
+ * @throws IOException if file I/O fails during reading.
+ */
public static Configuration loadConfiguration() throws IOException {
return loadConfiguration(null);
}
+ /**
+ * Loads the configuration from a given file, or from the default
+ * path if {@code configFile} is null.
+ *
+ * @param configFile the file containing the YAML config; may be null.
+ * @return the {@link Configuration} object, or null if not found/invalid.
+ * @throws IOException if file I/O fails during reading.
+ */
public static Configuration loadConfiguration(File configFile) throws IOException {
-
if (configFile == null) {
// Load configuration from the default path
configFile = new File(DEFAULT_CONFIG_FILE_PATH);
return mapper.readValue(configFile, Configuration.class);
}
+ /**
+ * Retrieves the contents of a prompt file by alias, e.g. "writer"
+ * maps to "writer.txt" in the prompts directory.
+ *
+ * @param alias the name of the prompt file (without ".txt").
+ * @return the full text content of the prompt file.
+ * @throws IOException if reading the prompt file fails.
+ */
public String getPromptByAlias(String alias) throws IOException {
File promptFile = new File(promptsDirectory, alias + ".txt");
return getFileContentsAsString(promptFile);
import java.io.File;
/**
 * Represents an AI model stored on the filesystem, including details such
 * as path, context size, alias, and an optional end-of-text marker.
 */
public class Model {

    /**
     * The path to the model file on the filesystem.
     */
    public final File filesystemPath;

    /**
     * The size of the context (in tokens) that this model is able to handle.
     */
    public final int contextSizeTokens;

    /**
     * A user-friendly alias for the model, e.g. "default" or "mistral".
     */
    public final String alias;

    /**
     * An optional marker indicating end of the AI-generated text (e.g., "###").
     * If non-null, it can be used to detect where the model has finished answering.
     */
    public final String endOfTextMarker;

    /**
     * Constructs a {@link Model} instance.
     *
     * @param filesystemPath    the path to the model file on the filesystem.
     * @param contextSizeTokens the size of the context in tokens.
     * @param modelAlias        a short alias by which the model is referenced.
     * @param endOfTextMarker   optional text that signifies the end of the
     *                          AI's output; may be null.
     */
    public Model(File filesystemPath, int contextSizeTokens, String modelAlias, String endOfTextMarker) {
        // BUG FIX: every final field must be assigned exactly once in the
        // constructor; previously only filesystemPath was set, which fails
        // to compile and would lose the remaining construction arguments.
        this.filesystemPath = filesystemPath;
        this.contextSizeTokens = contextSizeTokens;
        this.alias = modelAlias;
        this.endOfTextMarker = endOfTextMarker;
    }

    /**
     * Prints the model's alias, path, and context size to standard output.
     */
    public void printModelDetails() {
        System.out.println("Model: " + alias);
        System.out.println("    Path: " + filesystemPath);
        System.out.println("    Context size: " + contextSizeTokens);
    }
}