Specify path to llama-cli executable instead of llama.cpp project directory in configuration.
author    Svjatoslav Agejenko <svjatoslav@svjatoslav.eu>
          Wed, 1 Jan 2025 22:51:07 +0000 (00:51 +0200)
committer Svjatoslav Agejenko <svjatoslav@svjatoslav.eu>
          Wed, 1 Jan 2025 22:51:07 +0000 (00:51 +0200)
doc/setup.org
src/main/java/eu/svjatoslav/alyverkko_cli/AiTask.java
src/main/java/eu/svjatoslav/alyverkko_cli/commands/SelftestCommand.java
src/main/java/eu/svjatoslav/alyverkko_cli/configuration/Configuration.java

diff --git a/doc/setup.org b/doc/setup.org
index ca7641c..2be0b4d 100644
@@ -92,7 +92,7 @@ file. Below is an example of how the configuration file might look:
   mail_directory: "/home/user/AI/mail"
   models_directory: "/home/user/AI/models"
   default_temperature: 0.7
-  llama_cpp_dir_path: "/home/user/AI/llama.cpp/"
+  llama_cli_path: "/home/user/AI/llama.cpp/build/bin/llama-cli"
   batch_thread_count: 10
   thread_count: 6
   prompts_directory: "/home/user/.config/alyverkko-cli/prompts"
@@ -137,9 +137,9 @@ Here are available parameters:
   creative or random.
   - Default value: 0.7
 
-- llama_cpp_dir_path :: Specifies the filesystem path to the cloned
-  and compiled *llama.cpp* directory.
-  - Example Value: /home/user/AI/llama.cpp/
+- llama_cli_path :: Specifies the filesystem path to the compiled
+  *llama-cli* executable of the *llama.cpp* project.
+  - Example Value: /home/user/AI/llama.cpp/build/bin/llama-cli
   - This option is mandatory.
 
 - batch_thread_count :: Specifies the number of threads to use for
diff --git a/src/main/java/eu/svjatoslav/alyverkko_cli/AiTask.java b/src/main/java/eu/svjatoslav/alyverkko_cli/AiTask.java
index 7230232..05ba8cd 100644
@@ -11,7 +11,6 @@ import static java.lang.String.join;
 public class AiTask {
     public static final String AI_RESPONSE_MARKER = "ASSISTANT:";
     private static final String LLAMA_CPP_META_INFO_MARKER = "llm_load_print_meta: ";
-    public static final String MAIN_EXECUTABLE_NAME = "llama-cli";
 
     MailQuery mailQuery;
     private final Float temperature;
@@ -140,10 +139,7 @@ public class AiTask {
 
         int niceValue = 10; // Set the desired niceness level (10 is a common value for background tasks)
 
-        String executablePath = configuration.getLlamaCppDirPath().getAbsolutePath();
-        if (!executablePath.endsWith("/")) executablePath += "/";
-
-        executablePath += MAIN_EXECUTABLE_NAME;
+        String executablePath = configuration.getLlamaCliPath().getAbsolutePath();
 
         return join(" ",
                 "nice", "-n", Integer.toString(niceValue),
diff --git a/src/main/java/eu/svjatoslav/alyverkko_cli/commands/SelftestCommand.java b/src/main/java/eu/svjatoslav/alyverkko_cli/commands/SelftestCommand.java
index 7669bac..7a3ac3e 100644
@@ -38,9 +38,9 @@ public class SelftestCommand implements Command {
             allChecksPassed = false;
         }
 
-        // Validate llama.cpp executable path
-        if (!Main.configuration.getLlamaCppDirPath().exists() || !Main.configuration.getLlamaCppDirPath().isDirectory()) {
-            System.err.println("llama.cpp project directory not found at: " + Main.configuration.getLlamaCppDirPath());
+        // Validate llama-cli executable path
+        if (!Main.configuration.getLlamaCliPath().exists() || !Main.configuration.getLlamaCliPath().isFile()) {
+            System.err.println("llama-cli executable path does not point to existing file: " + Main.configuration.getLlamaCliPath());
             allChecksPassed = false;
         }
 
@@ -99,13 +99,6 @@ public class SelftestCommand implements Command {
             allChecksPassed = false;
         }
 
-        // Validate the main executable
-        File llamaMainExecutable = new File(configuration.getLlamaCppDirPath(), AiTask.MAIN_EXECUTABLE_NAME);
-        if (!llamaMainExecutable.exists() || !llamaMainExecutable.isFile()) {
-            System.err.println("The 'main' executable was not found in the llama.cpp directory.");
-            allChecksPassed = false;
-        }
-
         if (allChecksPassed)
             System.out.println("Selftest completed successfully.");
     }
diff --git a/src/main/java/eu/svjatoslav/alyverkko_cli/configuration/Configuration.java b/src/main/java/eu/svjatoslav/alyverkko_cli/configuration/Configuration.java
index 5092c8e..b301325 100644
@@ -25,8 +25,8 @@ public class Configuration {
     @JsonProperty("default_temperature")
     private float defaultTemperature;
 
-    @JsonProperty("llama_cpp_dir_path")
-    private File llamaCppDirPath;
+    @JsonProperty("llama_cli_path")
+    private File llamaCliPath;
 
     @JsonProperty("batch_thread_count")
     private int batchThreadCount;