diff --git a/app/build.gradle.kts b/app/build.gradle.kts
index e60a72b..95d8d0f 100644
--- a/app/build.gradle.kts
+++ b/app/build.gradle.kts
@@ -3,6 +3,7 @@ plugins {
alias(libs.plugins.kotlin.android)
alias(libs.plugins.kotlin.compose)
id("com.chaquo.python") // Apply it here
+ id("kotlin-kapt") // Added for the Room Android database subsystem and libraries
}
chaquopy {
@@ -79,4 +80,9 @@ dependencies {
coreLibraryDesugaring("com.android.tools:desugar_jdk_libs:2.0.4")
+
+ // Room Database for local chat history
+ implementation("androidx.room:room-runtime:2.6.1")
+ implementation("androidx.room:room-ktx:2.6.1")
+ kapt("androidx.room:room-compiler:2.6.1")
}
\ No newline at end of file
diff --git a/app/src/androidTest/java/net/mmanningau/alice/ExampleInstrumentedTest.kt b/app/src/androidTest/java/net/mmanningau/alice/ExampleInstrumentedTest.kt
index 88592f3..f98ad07 100644
--- a/app/src/androidTest/java/net/mmanningau/alice/ExampleInstrumentedTest.kt
+++ b/app/src/androidTest/java/net/mmanningau/alice/ExampleInstrumentedTest.kt
@@ -19,6 +19,6 @@ class ExampleInstrumentedTest {
fun useAppContext() {
// Context of the app under test.
val appContext = InstrumentationRegistry.getInstrumentation().targetContext
- assertEquals("net.mmanningau.alice", appContext.packageName)
+ assertEquals("net.mmanningau.alice", appContext.packageName)
}
}
\ No newline at end of file
diff --git a/app/src/main/AndroidManifest.xml b/app/src/main/AndroidManifest.xml
index 0d9ba88..22b893b 100644
--- a/app/src/main/AndroidManifest.xml
+++ b/app/src/main/AndroidManifest.xml
@@ -5,7 +5,7 @@
+
+ @Query("UPDATE threads SET title = :newTitle WHERE id = :threadId")
+ fun updateThreadTitle(threadId: Long, newTitle: String)
+
+ @Insert
+ fun insertMessage(message: ChatMessageEntity)
+
+ @Query("SELECT * FROM messages WHERE threadId = :threadId ORDER BY timestamp ASC")
+ fun getMessagesForThread(threadId: Long): List<ChatMessageEntity>
+}
+
+// 4. The Database Instance
+@Database(entities = [ChatThread::class, ChatMessageEntity::class], version = 1, exportSchema = false)
+abstract class AliceDatabase : RoomDatabase() {
+ abstract fun chatDao(): ChatDao
+}
\ No newline at end of file
diff --git a/app/src/main/java/net/mmanningau/alice/LlmManager.kt b/app/src/main/java/net/mmanningau/alice/LlmManager.kt
index a323310..78198ee 100644
--- a/app/src/main/java/net/mmanningau/alice/LlmManager.kt
+++ b/app/src/main/java/net/mmanningau/alice/LlmManager.kt
@@ -8,37 +8,122 @@ import dev.langchain4j.data.message.UserMessage
import dev.langchain4j.model.chat.ChatLanguageModel
import dev.langchain4j.model.openai.OpenAiChatModel
import java.time.Duration
+import java.text.SimpleDateFormat
+import java.util.Date
+import java.util.Locale
object LlmManager {
- // 1. We make the model variable so we can rebuild it when settings change
private var chatModel: ChatLanguageModel? = null
+ var currentMode: String = "Remote"
+ private set
- private val chatHistory = mutableListOf(
- SystemMessage("You are Alice, a highly capable local AI assistant. You provide concise, direct answers.")
- )
+ // Database tracking
+ private var chatDao: ChatDao? = null
+ var currentThreadId: Long = -1L
+ private set
- // 2. The UI will call this when you hit "Save"
- fun initialize(url: String, modelName: String) {
- chatModel = OpenAiChatModel.builder()
- .baseUrl(url)
- .apiKey("dummy-key")
- .modelName(modelName)
- .timeout(Duration.ofMinutes(3))
- .maxRetries(0)
- .logRequests(true)
- .logResponses(true)
- .build()
+ private var activeSystemPrompt: String = "You are a helpful AI assistant."
+ private val chatHistory = mutableListOf<dev.langchain4j.data.message.ChatMessage>()
+
+ // Initialization now makes the dao optional so the UI can safely call it!
+ fun initialize(
+ dao: ChatDao?, mode: String, url: String, modelName: String, apiKey: String, systemPrompt: String
+ ) {
+ // Only update the DAO if one was passed in (like on app boot)
+ if (dao != null) {
+ chatDao = dao
+ }
+
+ currentMode = mode
+ activeSystemPrompt = if (systemPrompt.isNotBlank()) systemPrompt else "You are a helpful AI assistant."
+
+ // Configure the Model Engine
+ if (mode == "Remote") {
+ val safeApiKey = if (apiKey.isNotBlank()) apiKey else "dummy-key"
+ chatModel = OpenAiChatModel.builder()
+ .baseUrl(url)
+ .apiKey(safeApiKey)
+ .modelName(modelName)
+ .timeout(Duration.ofMinutes(3))
+ .maxRetries(0)
+ .logRequests(true)
+ .logResponses(true)
+ .build()
+ } else {
+ chatModel = null // MLC Engine goes here later!
+ }
+
+ // Database Startup Logic
+ val existingThreads = chatDao?.getAllThreads() ?: emptyList()
+ if (existingThreads.isEmpty()) {
+ startNewThread()
+ } else if (currentThreadId == -1L) {
+ loadThread(existingThreads.first().id)
+ }
+ }
+
+ // Creates a blank slate in the DB and LangChain4j memory
+ fun startNewThread(): List<net.mmanningau.alice.ChatMessage> {
+ val dateFormat = SimpleDateFormat("MMM dd, HH:mm", Locale.getDefault())
+ val title = "Chat - ${dateFormat.format(Date())}"
+
+ currentThreadId = chatDao?.insertThread(ChatThread(title = title)) ?: -1L
+
+ chatHistory.clear()
+ chatHistory.add(SystemMessage(activeSystemPrompt))
+
+ return emptyList() // Returns an empty list to clear the UI
+ }
+
+ // Pulls history from the DB and translates it to LangChain4j memory AND UI memory
+ fun loadThread(threadId: Long): List<net.mmanningau.alice.ChatMessage> {
+ currentThreadId = threadId
+ chatHistory.clear()
+ chatHistory.add(SystemMessage(activeSystemPrompt))
+
+ val dbMessages = chatDao?.getMessagesForThread(threadId) ?: emptyList()
+ val uiMessages = mutableListOf<net.mmanningau.alice.ChatMessage>()
+
+ for (msg in dbMessages) {
+ if (msg.isUser) {
+ chatHistory.add(UserMessage(msg.text))
+ uiMessages.add(net.mmanningau.alice.ChatMessage(msg.text, true))
+ } else {
+ chatHistory.add(AiMessage(msg.text))
+ uiMessages.add(net.mmanningau.alice.ChatMessage(msg.text, false))
+ }
+ }
+ return uiMessages
+ }
+
+ fun getAllThreads(): List<ChatThread> {
+ return chatDao?.getAllThreads() ?: emptyList()
}
fun chat(userText: String): String {
- // Safety check in case the model hasn't been built yet
- val currentModel = chatModel ?: return "Error: LLM engine not initialized. Please check Settings."
+ if (currentMode == "MLC") return "System: MLC LLM On-Device engine is selected but not yet installed."
+ val currentModel = chatModel ?: return "Error: LLM engine not initialized."
+ // If the history size is 1, it means only the System prompt exists. This is the first message!
+ if (chatHistory.size == 1) {
+ // Take the first 25 characters. If it's longer, add "..."
+ val previewLength = 25
+ val newTitle = if (userText.length > previewLength) {
+ userText.take(previewLength).trim() + "..."
+ } else {
+ userText
+ }
+ // Update the database instantly
+ chatDao?.updateThreadTitle(currentThreadId, newTitle)
+ }
+
+ // 1. Save user message to DB and Memory
+ chatDao?.insertMessage(ChatMessageEntity(threadId = currentThreadId, text = userText, isUser = true))
chatHistory.add(UserMessage(userText))
+
val toolSpecs = SkillManager.loadSkills()
- // Use the active model
var response = currentModel.generate(chatHistory, toolSpecs)
var aiMessage: AiMessage = response.content()
chatHistory.add(aiMessage)
@@ -47,18 +132,17 @@ object LlmManager {
for (request in aiMessage.toolExecutionRequests()) {
val toolName = request.name()
val arguments = request.arguments()
-
val toolResult = SkillManager.executeSkill(toolName, arguments)
-
- val toolMessage = ToolExecutionResultMessage(request.id(), toolName, toolResult)
- chatHistory.add(toolMessage)
+ chatHistory.add(ToolExecutionResultMessage(request.id(), toolName, toolResult))
}
-
response = currentModel.generate(chatHistory, toolSpecs)
aiMessage = response.content()
chatHistory.add(aiMessage)
}
+ // 2. Save final AI message to DB
+ chatDao?.insertMessage(ChatMessageEntity(threadId = currentThreadId, text = aiMessage.text(), isUser = false))
+
return aiMessage.text()
}
}
\ No newline at end of file
diff --git a/app/src/main/java/net/mmanningau/alice/MainActivity.kt b/app/src/main/java/net/mmanningau/alice/MainActivity.kt
index 1f1d835..7ec1fd6 100644
--- a/app/src/main/java/net/mmanningau/alice/MainActivity.kt
+++ b/app/src/main/java/net/mmanningau/alice/MainActivity.kt
@@ -15,6 +15,8 @@ import androidx.compose.material.icons.filled.ArrowBack
import androidx.compose.material.icons.filled.Build
import androidx.compose.material.icons.filled.Menu
import androidx.compose.material.icons.filled.Send
+import androidx.compose.material.icons.filled.Add
+import androidx.compose.material.icons.filled.List
import androidx.compose.material3.*
import androidx.compose.runtime.*
import androidx.compose.ui.Alignment
@@ -43,19 +45,25 @@ class MainActivity : ComponentActivity() {
@OptIn(ExperimentalMaterial3Api::class)
@Composable
fun MainChatScreen() {
- // State management for the drawer and the chat
val drawerState = rememberDrawerState(initialValue = DrawerValue.Closed)
val scope = rememberCoroutineScope()
var currentScreen by remember { mutableStateOf("Chat") }
var inputText by remember { mutableStateOf("") }
- // We start with a dummy greeting message
- var messages by remember {
- mutableStateOf(listOf(ChatMessage("Hello! I am your local agent. How can I help?", false)))
+ // Start with whatever thread the LlmManager loaded on boot
+ var messages by remember { mutableStateOf(LlmManager.loadThread(LlmManager.currentThreadId)) }
+
+ // Track threads for the drawer menu
+ var availableThreads by remember { mutableStateOf(listOf<ChatThread>()) }
+
+ // Fetch threads from DB when the drawer opens
+ LaunchedEffect(drawerState.isOpen) {
+ if (drawerState.isOpen) {
+ availableThreads = LlmManager.getAllThreads()
+ }
}
- // 2. The Slide-out Drawer Setup
ModalNavigationDrawer(
drawerState = drawerState,
drawerContent = {
@@ -64,33 +72,50 @@ fun MainChatScreen() {
Text("Alice Configuration", modifier = Modifier.padding(16.dp), style = MaterialTheme.typography.titleLarge)
HorizontalDivider()
- // Navigation item to go back to Chat
NavigationDrawerItem(
label = { Text("Chat Interface") },
selected = currentScreen == "Chat",
icon = { Icon(Icons.Default.Send, contentDescription = "Chat") },
onClick = {
scope.launch { drawerState.close() }
- currentScreen = "Chat" // Swap screen
+ currentScreen = "Chat"
}
)
- // Navigation item for Skills Library
NavigationDrawerItem(
label = { Text("Settings & Skills") },
selected = currentScreen == "Skills",
icon = { Icon(Icons.Default.Build, contentDescription = "Skills") },
onClick = {
scope.launch { drawerState.close() }
- currentScreen = "Skills" // Swap screen
+ currentScreen = "Skills"
}
)
+
+ Spacer(modifier = Modifier.height(16.dp))
+ HorizontalDivider()
+ Text("Chat History", modifier = Modifier.padding(16.dp), style = MaterialTheme.typography.titleMedium, color = MaterialTheme.colorScheme.primary)
+
+ // The dynamic list of past threads!
+ LazyColumn {
+ items(availableThreads) { thread ->
+ NavigationDrawerItem(
+ label = { Text(thread.title) },
+ selected = thread.id == LlmManager.currentThreadId,
+ icon = { Icon(Icons.Default.List, contentDescription = "Thread") },
+ onClick = {
+ // Load the selected thread from the DB into the UI
+ messages = LlmManager.loadThread(thread.id)
+ scope.launch { drawerState.close() }
+ currentScreen = "Chat"
+ }
+ )
+ }
+ }
}
}
) {
- // THE ROUTER LOGIC
if (currentScreen == "Chat") {
- // 3. The Main Screen Layout (Scaffold provides the top bar and body)
Scaffold(
topBar = {
TopAppBar(
@@ -100,35 +125,38 @@ fun MainChatScreen() {
Icon(Icons.Default.Menu, contentDescription = "Menu")
}
},
+ // ADDED: The New Chat Button
+ actions = {
+ IconButton(onClick = {
+ messages = LlmManager.startNewThread()
+ }) {
+ Icon(Icons.Default.Add, contentDescription = "New Chat")
+ }
+ },
colors = TopAppBarDefaults.topAppBarColors(
containerColor = MaterialTheme.colorScheme.primaryContainer,
titleContentColor = MaterialTheme.colorScheme.onPrimaryContainer
)
)
}
- ) { paddingValues -> // <-- Notice how this now safely wraps the Column below!
-
- // 4. The Chat Area
+ ) { paddingValues ->
Column(
modifier = Modifier
.fillMaxSize()
.padding(paddingValues)
) {
- // The scrolling list of messages
LazyColumn(
modifier = Modifier
.weight(1f)
.fillMaxWidth()
.padding(horizontal = 8.dp),
- reverseLayout = true // Starts at the bottom like a real chat app
+ reverseLayout = true
) {
- // We reverse the list so the newest is at the bottom
items(messages.reversed()) { message ->
ChatBubble(message)
}
}
- // 5. The Input Field
Row(
modifier = Modifier
.fillMaxWidth()
@@ -149,22 +177,14 @@ fun MainChatScreen() {
onClick = {
if (inputText.isNotBlank()) {
val userText = inputText
-
- // Add user message to UI
messages = messages + ChatMessage(userText, true)
- // Clear input field
inputText = ""
- // Launch background thread to talk to LangChain4j
scope.launch(Dispatchers.IO) {
try {
- // Send it to the local LLM!
val response = LlmManager.chat(userText)
-
- // Compose state automatically handles switching back to the main thread for UI updates
messages = messages + ChatMessage(response, false)
} catch (e: Exception) {
- Log.e("AliceNetwork", "LLM Connection Failed", e)
messages = messages + ChatMessage("Connection Error: Is the local LLM server running?", false)
}
}
@@ -177,10 +197,8 @@ fun MainChatScreen() {
}
}
}
- } // <-- Scaffold closes here
-
+ }
} else if (currentScreen == "Skills") {
- // LOAD THE NEW SCREEN
SettingsScreen(
onBackClicked = { currentScreen = "Chat" }
)
@@ -221,12 +239,16 @@ fun SettingsScreen(onBackClicked: () -> Unit) {
val context = LocalContext.current
val prefs = context.getSharedPreferences("AlicePrefs", Context.MODE_PRIVATE)
- // Load state from SharedPreferences
+ // 1. ALL State Variables Declared Here!
var llmMode by remember { mutableStateOf(prefs.getString("llmMode", "Remote") ?: "Remote") }
var llmUrl by remember { mutableStateOf(prefs.getString("llmUrl", "http://10.0.2.2:11434/v1") ?: "") }
var modelName by remember { mutableStateOf(prefs.getString("modelName", "llama3.2") ?: "") }
+ var apiKey by remember { mutableStateOf(prefs.getString("apiKey", "") ?: "") } // The missing key!
var skillsPath by remember { mutableStateOf(prefs.getString("skillsPath", SkillManager.skillsDirectory?.absolutePath ?: "") ?: "") }
+ val defaultPrompt = "You are Alice, a highly capable local AI assistant. You provide concise, direct answers."
+ var systemPrompt by remember { mutableStateOf(prefs.getString("systemPrompt", defaultPrompt) ?: defaultPrompt) } // The missing prompt!
+
Scaffold(
topBar = {
TopAppBar(
@@ -243,7 +265,6 @@ fun SettingsScreen(onBackClicked: () -> Unit) {
)
}
) { paddingValues ->
- // We use verticalScroll so the keyboard doesn't hide the text fields!
Column(
modifier = Modifier
.fillMaxSize()
@@ -254,41 +275,72 @@ fun SettingsScreen(onBackClicked: () -> Unit) {
Text("LLM Engine Configuration", style = MaterialTheme.typography.titleMedium, color = MaterialTheme.colorScheme.primary)
Spacer(modifier = Modifier.height(8.dp))
- // The Toggle!
Row(verticalAlignment = Alignment.CenterVertically) {
RadioButton(
selected = llmMode == "Remote",
- onClick = {
- llmMode = "Remote"
- llmUrl = "http://10.0.2.2:11434/v1" // Auto-fill standard Ollama host
- }
+ onClick = { llmMode = "Remote" }
)
- Text("Remote Host (Ollama)")
+ Text("Remote API")
Spacer(modifier = Modifier.width(16.dp))
RadioButton(
- selected = llmMode == "Local",
- onClick = {
- llmMode = "Local"
- llmUrl = "http://localhost:8080/v1" // Auto-fill standard Llama.cpp host
- }
+ selected = llmMode == "MLC",
+ onClick = { llmMode = "MLC" }
)
- Text("On-Device")
+ Text("Local (MLC LLM)")
}
Spacer(modifier = Modifier.height(8.dp))
+
OutlinedTextField(
- value = llmUrl,
- onValueChange = { llmUrl = it },
- label = { Text("LLM Server URL") },
+ value = modelName,
+ onValueChange = { modelName = it },
+ label = { Text("Model Name (e.g., llama3.2 or qwen2.5)") },
modifier = Modifier.fillMaxWidth()
)
Spacer(modifier = Modifier.height(8.dp))
+
+ if (llmMode == "Remote") {
+ OutlinedTextField(
+ value = llmUrl,
+ onValueChange = { llmUrl = it },
+ label = { Text("Provider URL (Ollama, OpenAI, Gemini)") },
+ modifier = Modifier.fillMaxWidth()
+ )
+ Spacer(modifier = Modifier.height(8.dp))
+ OutlinedTextField(
+ value = apiKey,
+ onValueChange = { apiKey = it },
+ label = { Text("API Key (Leave blank for Ollama)") },
+ modifier = Modifier.fillMaxWidth()
+ )
+ } else {
+ Surface(
+ color = MaterialTheme.colorScheme.surfaceVariant,
+ shape = RoundedCornerShape(8.dp),
+ modifier = Modifier.fillMaxWidth()
+ ) {
+ Text(
+ text = "Native GPU inference does not require network URLs or API keys. Make sure you have downloaded the required MLC model files to your device.",
+ modifier = Modifier.padding(16.dp),
+ style = MaterialTheme.typography.bodySmall,
+ color = MaterialTheme.colorScheme.onSurfaceVariant
+ )
+ }
+ }
+
+ Spacer(modifier = Modifier.height(32.dp))
+ Text("AI Persona", style = MaterialTheme.typography.titleMedium, color = MaterialTheme.colorScheme.primary)
+ Spacer(modifier = Modifier.height(8.dp))
+
OutlinedTextField(
- value = modelName,
- onValueChange = { modelName = it },
- label = { Text("Model Name (e.g., llama3.2)") },
- modifier = Modifier.fillMaxWidth()
+ value = systemPrompt,
+ onValueChange = { systemPrompt = it },
+ label = { Text("System Prompt") },
+ modifier = Modifier
+ .fillMaxWidth()
+ .height(140.dp),
+ maxLines = 6
)
Spacer(modifier = Modifier.height(32.dp))
@@ -305,19 +357,18 @@ fun SettingsScreen(onBackClicked: () -> Unit) {
Spacer(modifier = Modifier.height(32.dp))
Button(
onClick = {
- // 1. Save all inputs to device memory
prefs.edit()
.putString("llmMode", llmMode)
.putString("llmUrl", llmUrl)
.putString("modelName", modelName)
+ .putString("apiKey", apiKey)
.putString("skillsPath", skillsPath)
+ .putString("systemPrompt", systemPrompt)
.apply()
- // 2. Hot-reload the engines with the new settings
- LlmManager.initialize(llmUrl, modelName)
+ LlmManager.initialize(null, llmMode, llmUrl, modelName, apiKey, systemPrompt)
SkillManager.updateDirectory(skillsPath)
- // 3. Return to chat
onBackClicked()
},
modifier = Modifier.fillMaxWidth(),
diff --git a/app/src/main/java/net/mmanningau/alice/net/mmanningau/AliceApp.kt b/app/src/main/java/net/mmanningau/alice/net/mmanningau/AliceApp.kt
new file mode 100644
index 0000000..b01a9ba
--- /dev/null
+++ b/app/src/main/java/net/mmanningau/alice/net/mmanningau/AliceApp.kt
@@ -0,0 +1,43 @@
+package net.mmanningau.alice
+
+import android.app.Application
+import android.content.Context
+import androidx.room.Room // Added for the Room Android database subsystem and libraries
+import com.chaquo.python.Python
+import com.chaquo.python.android.AndroidPlatform
+
+class AliceApp : Application() {
+ override fun onCreate() {
+ super.onCreate()
+
+ if (!Python.isStarted()) {
+ Python.start(AndroidPlatform(this))
+ }
+
+ val prefs = getSharedPreferences("AlicePrefs", Context.MODE_PRIVATE)
+ val savedMode = prefs.getString("llmMode", "Remote") ?: "Remote"
+ val savedUrl = prefs.getString("llmUrl", "http://10.0.2.2:11434/v1") ?: "http://10.0.2.2:11434/v1"
+ val savedModel = prefs.getString("modelName", "llama3.2") ?: "qwen3:8b"
+ val savedApiKey = prefs.getString("apiKey", "") ?: ""
+ val savedSkillsPath = prefs.getString("skillsPath", "") ?: ""
+
+ val defaultPrompt = "You are Alice, a highly capable local AI assistant. You provide concise, direct answers."
+ val savedSystemPrompt = prefs.getString("systemPrompt", defaultPrompt) ?: defaultPrompt
+
+ SkillManager.initialize(this)
+ if (savedSkillsPath.isNotBlank()) {
+ SkillManager.updateDirectory(savedSkillsPath)
+ } else {
+ prefs.edit().putString("skillsPath", SkillManager.skillsDirectory?.absolutePath).apply()
+ }
+
+ // NEW: Build the local database
+ val db = Room.databaseBuilder(
+ applicationContext,
+ AliceDatabase::class.java, "alice-chat-database"
+ ).allowMainThreadQueries().build() // We use allowMainThreadQueries for immediate boot loading
+
+ // Pass the DAO into the manager!
+ LlmManager.initialize(db.chatDao(), savedMode, savedUrl, savedModel, savedApiKey, savedSystemPrompt)
+ }
+}
\ No newline at end of file
diff --git a/app/src/main/java/net/mmanningau/alice/net/mmanningau/alice.kt b/app/src/main/java/net/mmanningau/alice/net/mmanningau/alice.kt
deleted file mode 100644
index f968032..0000000
--- a/app/src/main/java/net/mmanningau/alice/net/mmanningau/alice.kt
+++ /dev/null
@@ -1,35 +0,0 @@
-package net.mmanningau.alice
-
-import android.app.Application
-import android.content.Context
-import com.chaquo.python.Python
-import com.chaquo.python.android.AndroidPlatform
-
-class alice : Application() {
- override fun onCreate() {
- super.onCreate()
-
- // This boots the Python environment the moment the app starts
- if (!Python.isStarted()) {
- Python.start(AndroidPlatform(this))
- }
- // 1. Grab saved settings (or defaults if it's the first time booting)
- val prefs = getSharedPreferences("AlicePrefs", Context.MODE_PRIVATE)
- val savedUrl = prefs.getString("llmUrl", "http://10.0.2.2:11434/v1") ?: "http://10.0.2.2:11434/v1"
- val savedModel = prefs.getString("modelName", "llama3.2") ?: "llama3.2"
- val savedSkillsPath = prefs.getString("skillsPath", "") ?: ""
-
- // 2. Initialize the Skills folder
- SkillManager.initialize(this) // This creates the safe default
-
- if (savedSkillsPath.isNotBlank()) {
- SkillManager.updateDirectory(savedSkillsPath) // Override if the user saved a custom one
- } else {
- // If it's the first boot, save the default path so the UI can display it
- prefs.edit().putString("skillsPath", SkillManager.skillsDirectory?.absolutePath).apply()
- }
-
- // 3. Boot the LLM Engine
- LlmManager.initialize(savedUrl, savedModel)
- }
-}
\ No newline at end of file