package eu.svjatoslav.alyverkko_cli.commands;
+import eu.svjatoslav.alyverkko_cli.AiTask;
import eu.svjatoslav.alyverkko_cli.Command;
import eu.svjatoslav.alyverkko_cli.Main;
+import java.io.File;
import java.io.IOException;
import static eu.svjatoslav.alyverkko_cli.Main.configuration;
System.out.println("Starting selftest...");
configuration = loadConfiguration();
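+ // Run every check and report all detected problems in one pass,
+ // instead of aborting at the first failure.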
+ boolean allChecksPassed = true;
+
// Check if the configuration is loaded
if (Main.configuration == null) {
System.err.println("Configuration not found or invalid.");
+ // The configuration object is dereferenced by every later check, so abort immediately.
return;
}
// Validate models directory
if (!Main.configuration.getModelsDirectory().exists() || !Main.configuration.getModelsDirectory().isDirectory()) {
System.err.println("Models directory does not exist or is not a directory: " + Main.configuration.getModelsDirectory());
- return;
+ allChecksPassed = false;
}
- // Validate llama.cpp executable path
+ // Validate llama.cpp project directory
if (!Main.configuration.getLlamaCppDirPath().exists() || !Main.configuration.getLlamaCppDirPath().isDirectory()) {
System.err.println("llama.cpp project directory not found at: " + Main.configuration.getLlamaCppDirPath());
- return;
+ allChecksPassed = false;
}
+
+ // Validate mail directory
+ if (!Main.configuration.getMailDirectory().exists() || !Main.configuration.getMailDirectory().isDirectory()) {
+ System.err.println("Mail directory does not exist or is not a directory: " + Main.configuration.getMailDirectory());
+ allChecksPassed = false;
+ }
+
+ // Validate prompts
+ if (Main.configuration.getPrompts().isEmpty()) {
+ System.err.println("No prompts are defined in the configuration.");
+ allChecksPassed = false;
+ }
+
+ // Validate models
+ if (Main.configuration.getModels().isEmpty()) {
+ System.err.println("No models are defined in the configuration.");
+ allChecksPassed = false;
+ }
+
+ // Validate default temperature
+ if (Main.configuration.getDefaultTemperature() < 0 || Main.configuration.getDefaultTemperature() > 3) {
+ System.err.println("Default temperature must be between 0 and 3.");
+ allChecksPassed = false;
+ }
+
+ // Validate thread count
+ if (Main.configuration.getThreadCount() < 1) {
+ System.err.println("Thread count must be at least 1.");
+ allChecksPassed = false;
+ }
+
+ // Validate batch thread count
+ if (Main.configuration.getBatchThreadCount() < 1) {
+ System.err.println("Batch thread count must be at least 1.");
+ allChecksPassed = false;
+ }
+
- // Additional checks like model file existence, etc., can be added here
+ // Validate the main executable
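+ // AiTask.MAIN_EXECUTABLE_NAME holds the file name of the compiled
+ // llama.cpp binary; the selftest verifies it has actually been built.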
+ File llamaMainExecutable = new File(configuration.getLlamaCppDirPath(), AiTask.MAIN_EXECUTABLE_NAME);
+ if (!llamaMainExecutable.exists() || !llamaMainExecutable.isFile()) {
+ System.err.println("The 'main' executable was not found in the llama.cpp directory.");
+ allChecksPassed = false;
+ }
- System.out.println("Selftest completed successfully.");
+ if (allChecksPassed)
+     System.out.println("Selftest completed successfully.");
+ else
+     System.err.println("Selftest failed; see the messages above for details.");
}
}