Decided to initially stick with the Ollama LLM backend rather than pursue the full C++ download-and-compile route that MLC LLM usage would require. Updated the UI to include downloading the model that the Llamatik backend will use. Tested as working; models download successfully.
This commit is contained in:
@@ -3,7 +3,7 @@ plugins {
|
||||
alias(libs.plugins.kotlin.android)
|
||||
alias(libs.plugins.kotlin.compose)
|
||||
id("com.chaquo.python") // Apply it here
|
||||
id("kotlin-kapt") // Added for the Room Android database subsystem and libraries
|
||||
id("com.google.devtools.ksp") // Added for the Room Android database subsystem and libraries
|
||||
}
|
||||
|
||||
chaquopy {
|
||||
@@ -84,5 +84,11 @@ dependencies {
|
||||
// Room Database for local chat history
|
||||
implementation("androidx.room:room-runtime:2.6.1")
|
||||
implementation("androidx.room:room-ktx:2.6.1")
|
||||
kapt("androidx.room:room-compiler:2.6.1")
|
||||
ksp("androidx.room:room-compiler:2.6.1")
|
||||
|
||||
// Llama.cpp Kotlin Multiplatform Wrapper
|
||||
implementation("com.llamatik:library:0.8.1")
|
||||
|
||||
// Extended Material Icons (for Download, CheckCircle, etc.)
|
||||
implementation("androidx.compose.material:material-icons-extended")
|
||||
}
|
||||
53
app/src/main/java/net/mmanningau/alice/LlamaCppAdapter.kt
Normal file
53
app/src/main/java/net/mmanningau/alice/LlamaCppAdapter.kt
Normal file
@@ -0,0 +1,53 @@
|
||||
package net.mmanningau.alice
|
||||
|
||||
import dev.langchain4j.data.message.AiMessage
|
||||
import dev.langchain4j.data.message.ChatMessage
|
||||
import dev.langchain4j.data.message.SystemMessage
|
||||
import dev.langchain4j.data.message.UserMessage
|
||||
import dev.langchain4j.model.chat.ChatLanguageModel
|
||||
import dev.langchain4j.model.output.Response
|
||||
import java.io.File
|
||||
|
||||
/**
 * Adapts LangChain4j's [ChatLanguageModel] interface onto a local llama.cpp
 * (Llamatik) engine running a .gguf model stored at [modelPath].
 *
 * The native engine integration is still a stub: [generate] currently returns a
 * placeholder string once the model file's presence has been verified.
 */
class LlamaCppAdapter(private val modelPath: String) : ChatLanguageModel {

    // We will initialize the native C++ engine here once the model is downloaded.
    private var isEngineLoaded = false

    /**
     * Verifies the model file exists and (eventually) loads it into the native engine.
     * Idempotent: subsequent calls are no-ops once the engine is flagged as loaded.
     *
     * @throws IllegalStateException if the .gguf file has not been downloaded yet.
     */
    private fun getOrInitEngine() {
        if (!isEngineLoaded) {
            // check() throws IllegalStateException with this message, matching the
            // contract callers rely on when the model is missing.
            check(File(modelPath).exists()) {
                "Model file not found at: $modelPath. Please download a model first."
            }
            // TODO: In the next step, we will call Llamatik's native init function here
            // to load the .gguf file into the device's memory.
            isEngineLoaded = true
        }
    }

    /**
     * Translates LangChain4j [messages] into a raw conversational prompt, runs the
     * (placeholder) local engine, and wraps the result back into a [Response].
     */
    override fun generate(messages: List<ChatMessage>): Response<AiMessage> {
        // Ensure the engine is booted before any generation attempt.
        getOrInitEngine()

        // 1. Translation IN: flatten the chat history into a simple role-prefixed
        //    transcript. Local models usually need a specific prompt template
        //    (like ChatML); this is a basic conversational wrapper for now.
        val prompt = buildString {
            for (message in messages) {
                when (message) {
                    is SystemMessage -> append("System: ${message.text()}\n")
                    is UserMessage -> append("User: ${message.text()}\n")
                    is AiMessage -> append("Alice: ${message.text()}\n")
                }
            }
            // Trailing tag cues the model to continue as the assistant.
            append("Alice: ")
        }

        // 2. Execution: (Placeholder until we wire the specific Llamatik execution call)
        // val responseText = LlamatikEngine.generate(prompt)
        val responseText = "This is a placeholder response from the local Llama.cpp engine!"

        // 3. Translation OUT: Wrap the raw string back into LangChain4j's format.
        return Response.from(AiMessage(responseText))
    }
}
|
||||
@@ -17,6 +17,9 @@ import androidx.compose.material.icons.filled.Menu
|
||||
import androidx.compose.material.icons.filled.Send
|
||||
import androidx.compose.material.icons.filled.Add
|
||||
import androidx.compose.material.icons.filled.List
|
||||
import androidx.compose.material.icons.filled.CheckCircle
|
||||
import androidx.compose.material.icons.filled.Download
|
||||
import androidx.compose.material3.LinearProgressIndicator
|
||||
import androidx.compose.material3.*
|
||||
import androidx.compose.runtime.*
|
||||
import androidx.compose.ui.Alignment
|
||||
@@ -92,6 +95,19 @@ fun MainChatScreen() {
|
||||
}
|
||||
)
|
||||
|
||||
// --- NEW: Conditional Model Manager Button ---
|
||||
if (LlmManager.currentMode == "Local") {
|
||||
NavigationDrawerItem(
|
||||
label = { Text("Model Manager") },
|
||||
selected = currentScreen == "ModelManager",
|
||||
icon = { Icon(Icons.Default.Add, contentDescription = "Download") }, // You can change this icon!
|
||||
onClick = {
|
||||
scope.launch { drawerState.close() }
|
||||
currentScreen = "ModelManager"
|
||||
}
|
||||
)
|
||||
}
|
||||
|
||||
Spacer(modifier = Modifier.height(16.dp))
|
||||
HorizontalDivider()
|
||||
Text("Chat History", modifier = Modifier.padding(16.dp), style = MaterialTheme.typography.titleMedium, color = MaterialTheme.colorScheme.primary)
|
||||
@@ -203,6 +219,11 @@ fun MainChatScreen() {
|
||||
onBackClicked = { currentScreen = "Chat" }
|
||||
)
|
||||
}
|
||||
else if (currentScreen == "ModelManager") {
|
||||
ModelManagerScreen(
|
||||
onBackClicked = { currentScreen = "Chat" }
|
||||
)
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
@@ -283,10 +304,10 @@ fun SettingsScreen(onBackClicked: () -> Unit) {
|
||||
Text("Remote API")
|
||||
Spacer(modifier = Modifier.width(16.dp))
|
||||
RadioButton(
|
||||
selected = llmMode == "MLC",
|
||||
onClick = { llmMode = "MLC" }
|
||||
selected = llmMode == "Local",
|
||||
onClick = { llmMode = "Local" }
|
||||
)
|
||||
Text("Local (MLC LLM)")
|
||||
Text("Local (Llama.cpp)")
|
||||
}
|
||||
|
||||
Spacer(modifier = Modifier.height(8.dp))
|
||||
@@ -378,4 +399,135 @@ fun SettingsScreen(onBackClicked: () -> Unit) {
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
/**
 * Screen for browsing, downloading, and activating curated local GGUF models.
 *
 * Each model card is in one of three visual states:
 *  1. downloaded  -> "Set as Active" / "Active Model" button,
 *  2. downloading -> progress bar fed by [ModelDownloader.downloadModel],
 *  3. neither     -> "Download" button that kicks off the background download.
 *
 * @param onBackClicked invoked when the user taps the toolbar back arrow.
 */
@OptIn(ExperimentalMaterial3Api::class)
@Composable
fun ModelManagerScreen(onBackClicked: () -> Unit) {
    val context = LocalContext.current
    val scope = rememberCoroutineScope()
    // Same prefs file the rest of the app reads the active model name from.
    val prefs = context.getSharedPreferences("AlicePrefs", Context.MODE_PRIVATE)

    // Track which model the user currently has selected as their active brain
    var activeModelName by remember { mutableStateOf(prefs.getString("modelName", "") ?: "") }

    // Keep track of download progress percentages for each model ID
    val downloadProgress = remember { mutableStateMapOf<String, Int>() }

    Scaffold(
        topBar = {
            TopAppBar(
                title = { Text("Local Model Manager") },
                navigationIcon = {
                    IconButton(onClick = onBackClicked) {
                        Icon(Icons.Default.ArrowBack, contentDescription = "Back")
                    }
                },
                colors = TopAppBarDefaults.topAppBarColors(
                    containerColor = MaterialTheme.colorScheme.secondaryContainer,
                    titleContentColor = MaterialTheme.colorScheme.onSecondaryContainer
                )
            )
        }
    ) { paddingValues ->
        LazyColumn(
            modifier = Modifier
                .fillMaxSize()
                .padding(paddingValues)
                .padding(16.dp),
            verticalArrangement = Arrangement.spacedBy(16.dp)
        ) {
            // Static header explaining the trade-off between model sizes.
            item {
                Text(
                    "Qwen 2.5 Architecture",
                    style = MaterialTheme.typography.titleMedium,
                    color = MaterialTheme.colorScheme.primary
                )
                Text(
                    "These models will run entirely on your device's GPU. Larger models are smarter but consume more battery and generate text slower.",
                    style = MaterialTheme.typography.bodySmall,
                    color = MaterialTheme.colorScheme.onSurfaceVariant
                )
                Spacer(modifier = Modifier.height(8.dp))
            }

            // One card per curated model; state is re-derived on every recomposition,
            // so a finished download flips the card to the "downloaded" branch.
            items(ModelRegistry.curatedModels) { model ->
                val isDownloaded = ModelRegistry.isModelDownloaded(context, model.fileName)
                val currentProgress = downloadProgress[model.id] ?: 0
                val isActive = activeModelName == model.fileName

                Card(
                    modifier = Modifier.fillMaxWidth(),
                    shape = RoundedCornerShape(12.dp),
                    colors = CardDefaults.cardColors(containerColor = MaterialTheme.colorScheme.surfaceVariant)
                ) {
                    Column(modifier = Modifier.padding(16.dp)) {
                        Row(
                            modifier = Modifier.fillMaxWidth(),
                            horizontalArrangement = Arrangement.SpaceBetween,
                            verticalAlignment = Alignment.CenterVertically
                        ) {
                            Text(model.name, style = MaterialTheme.typography.titleMedium)
                            Text("${model.sizeMb} MB", style = MaterialTheme.typography.labelMedium)
                        }

                        Spacer(modifier = Modifier.height(4.dp))
                        Text(model.description, style = MaterialTheme.typography.bodySmall)
                        Spacer(modifier = Modifier.height(16.dp))

                        if (isDownloaded) {
                            // State 1: downloaded — allow activating/highlight active model.
                            Button(
                                onClick = {
                                    // Save the exact filename so LlmManager knows which one to boot up
                                    prefs.edit().putString("modelName", model.fileName).apply()
                                    activeModelName = model.fileName
                                },
                                modifier = Modifier.fillMaxWidth(),
                                colors = ButtonDefaults.buttonColors(
                                    containerColor = if (isActive) MaterialTheme.colorScheme.primary else MaterialTheme.colorScheme.secondary
                                )
                            ) {
                                if (isActive) {
                                    Icon(Icons.Default.CheckCircle, contentDescription = "Active")
                                    Spacer(modifier = Modifier.width(8.dp))
                                    Text("Active Model")
                                } else {
                                    Text("Set as Active")
                                }
                            }
                        } else if (currentProgress > 0 && currentProgress < 100) {
                            // State 2: download in flight — show live percentage.
                            Column(modifier = Modifier.fillMaxWidth()) {
                                Text("Downloading: $currentProgress%", style = MaterialTheme.typography.labelMedium)
                                Spacer(modifier = Modifier.height(4.dp))
                                LinearProgressIndicator(
                                    progress = { currentProgress / 100f },
                                    modifier = Modifier.fillMaxWidth()
                                )
                            }
                        } else {
                            // State 3: not downloaded (or progress is 0 / the -1 failure
                            // value from ModelDownloader, which lands here so the user
                            // can simply retry).
                            Button(
                                onClick = {
                                    // Initialize progress so the card flips to the
                                    // progress-bar branch immediately.
                                    downloadProgress[model.id] = 1

                                    // Launch the background download
                                    scope.launch {
                                        ModelDownloader.downloadModel(context, model.downloadUrl, model.fileName)
                                            .collect { progress ->
                                                downloadProgress[model.id] = progress
                                            }
                                    }
                                },
                                modifier = Modifier.fillMaxWidth()
                            ) {
                                Icon(Icons.Default.Download, contentDescription = "Download")
                                Spacer(modifier = Modifier.width(8.dp))
                                Text("Download")
                            }
                        }
                    }
                }
            }
        }
    }
}
|
||||
71
app/src/main/java/net/mmanningau/alice/ModelDownloader.kt
Normal file
71
app/src/main/java/net/mmanningau/alice/ModelDownloader.kt
Normal file
@@ -0,0 +1,71 @@
|
||||
package net.mmanningau.alice
|
||||
|
||||
import android.app.DownloadManager
|
||||
import android.content.Context
|
||||
import android.net.Uri
|
||||
import kotlinx.coroutines.Dispatchers
|
||||
import kotlinx.coroutines.delay
|
||||
import kotlinx.coroutines.flow.Flow
|
||||
import kotlinx.coroutines.flow.flow
|
||||
import kotlinx.coroutines.flow.flowOn
|
||||
import java.io.File
|
||||
|
||||
object ModelDownloader {

    /**
     * Enqueues [fileName] for download from [url] via Android's [DownloadManager]
     * and emits progress as an Int percentage.
     *
     * Emission contract: intermediate values 0..99 while running, a final 100 on
     * success, or -1 on failure. The OS download status is polled once per second,
     * and the whole flow runs on [Dispatchers.IO].
     */
    fun downloadModel(context: Context, url: String, fileName: String): Flow<Int> = flow {
        val downloadManager = context.getSystemService(Context.DOWNLOAD_SERVICE) as DownloadManager

        // Ensure the directory exists before pointing DownloadManager at it.
        val modelsDir = ModelRegistry.getModelsDirectory(context)

        val request = DownloadManager.Request(Uri.parse(url))
            .setTitle(fileName)
            .setDescription("Downloading AI Model for Alice...")
            .setNotificationVisibility(DownloadManager.Request.VISIBILITY_VISIBLE_NOTIFY_COMPLETED)
            // Save it directly into our app's specific Models folder
            .setDestinationUri(Uri.fromFile(File(modelsDir, fileName)))
            .setAllowedOverMetered(true) // Allow cellular downloads

        val downloadId = downloadManager.enqueue(request)
        var finishDownload = false

        // Ping the OS every second to get the latest percentage.
        while (!finishDownload) {
            val query = DownloadManager.Query().setFilterById(downloadId)
            // use {} guarantees the Cursor is closed even if emit() throws or the
            // collecting coroutine is cancelled mid-iteration (the previous manual
            // cursor.close() leaked in those paths).
            downloadManager.query(query).use { cursor ->
                if (cursor.moveToFirst()) {
                    val statusIndex = cursor.getColumnIndex(DownloadManager.COLUMN_STATUS)
                    when (cursor.getInt(statusIndex)) {
                        DownloadManager.STATUS_SUCCESSFUL -> {
                            finishDownload = true
                            emit(100)
                        }
                        DownloadManager.STATUS_FAILED -> {
                            finishDownload = true
                            emit(-1) // Error state
                        }
                        DownloadManager.STATUS_RUNNING -> {
                            val downloadedIndex = cursor.getColumnIndex(DownloadManager.COLUMN_BYTES_DOWNLOADED_SO_FAR)
                            val totalIndex = cursor.getColumnIndex(DownloadManager.COLUMN_TOTAL_SIZE_BYTES)
                            val bytesDownloaded = cursor.getLong(downloadedIndex)
                            val bytesTotal = cursor.getLong(totalIndex)

                            // Total size is unknown (-1/0) until the server responds,
                            // so only emit once we can compute a real percentage.
                            if (bytesTotal > 0) {
                                emit(((bytesDownloaded * 100L) / bytesTotal).toInt())
                            }
                        }
                    }
                }
            }

            if (!finishDownload) {
                delay(1000)
            }
        }
    }.flowOn(Dispatchers.IO)
}
|
||||
68
app/src/main/java/net/mmanningau/alice/ModelRegistry.kt
Normal file
68
app/src/main/java/net/mmanningau/alice/ModelRegistry.kt
Normal file
@@ -0,0 +1,68 @@
|
||||
package net.mmanningau.alice
|
||||
|
||||
import android.content.Context
|
||||
import java.io.File
|
||||
|
||||
/**
 * Metadata for one downloadable on-device GGUF model.
 *
 * @property id          Stable key used by the UI to track per-model download progress.
 * @property name        Human-readable display name shown on the model card.
 * @property description Short blurb describing the model's strengths.
 * @property fileName    File name the model is saved under in the app's Models directory.
 * @property downloadUrl Direct URL the .gguf file is fetched from.
 * @property sizeMb      Approximate download size in megabytes (display only).
 */
data class LocalModel(
    val id: String,
    val name: String,
    val description: String,
    val fileName: String,
    val downloadUrl: String,
    val sizeMb: Int
)
|
||||
|
||||
/**
 * Catalogue of curated on-device GGUF models, plus helpers for locating
 * downloaded model files inside the app's external "Models" directory.
 */
object ModelRegistry {

    /** Fixed set of models offered in the Model Manager UI. */
    val curatedModels = listOf(
        LocalModel(
            id = "qwen-0.5b",
            name = "Qwen 2.5 (0.5B)",
            description = "Ultra-light and lightning fast. Best for quick tasks and basic tool triggering.",
            fileName = "qwen2.5-0.5b-instruct-q4_k_m.gguf",
            downloadUrl = "https://huggingface.co/Qwen/Qwen2.5-0.5B-Instruct-GGUF/resolve/main/qwen2.5-0.5b-instruct-q4_k_m.gguf",
            sizeMb = 398,
        ),
        LocalModel(
            id = "qwen-1.5b",
            name = "Qwen 2.5 (1.5B)",
            description = "The perfect daily driver. Excellent balance of speed, intelligence, and battery efficiency.",
            fileName = "qwen2.5-1.5b-instruct-q4_k_m.gguf",
            downloadUrl = "https://huggingface.co/Qwen/Qwen2.5-1.5B-Instruct-GGUF/resolve/main/qwen2.5-1.5b-instruct-q4_k_m.gguf",
            sizeMb = 1120,
        ),
        LocalModel(
            id = "qwen-coder-3b",
            name = "Qwen 2.5 Coder (3B)",
            description = "Specialized for programming. Fantastic for generating Python scripts and home lab configurations.",
            fileName = "qwen2.5-coder-3b-instruct-q4_k_m.gguf",
            downloadUrl = "https://huggingface.co/Qwen/Qwen2.5-Coder-3B-Instruct-GGUF/resolve/main/qwen2.5-coder-3b-instruct-q4_k_m.gguf",
            sizeMb = 2020,
        ),
        LocalModel(
            id = "qwen-3b",
            name = "Qwen 2.5 (3B)",
            description = "The Heavyweight. The highest quality conversational responses your device can comfortably run.",
            fileName = "qwen2.5-3b-instruct-q4_k_m.gguf",
            downloadUrl = "https://huggingface.co/Qwen/Qwen2.5-3B-Instruct-GGUF/resolve/main/qwen2.5-3b-instruct-q4_k_m.gguf",
            sizeMb = 2020,
        ),
    )

    /**
     * Returns the app-scoped external "Models" folder, creating it on first use.
     */
    fun getModelsDirectory(context: Context): File =
        File(context.getExternalFilesDir(null), "Models").apply {
            if (!exists()) mkdirs()
        }

    /**
     * True when [fileName] exists in the Models folder with non-zero size
     * (a zero-byte file indicates an incomplete or failed download).
     */
    fun isModelDownloaded(context: Context, fileName: String): Boolean {
        val candidate = File(getModelsDirectory(context), fileName)
        return candidate.exists() && candidate.length() > 0
    }

    /** Absolute filesystem path where [fileName] lives (or would live) on disk. */
    fun getModelPath(context: Context, fileName: String): String =
        File(getModelsDirectory(context), fileName).absolutePath
}
|
||||
@@ -5,4 +5,5 @@ plugins {
|
||||
alias(libs.plugins.kotlin.compose) apply false
|
||||
// Add the Chaquopy plugin here
|
||||
id("com.chaquo.python") version "15.0.1" apply false
|
||||
id("com.google.devtools.ksp") version "2.2.0-2.0.2" apply false
|
||||
}
|
||||
@@ -1,6 +1,6 @@
|
||||
[versions]
|
||||
agp = "8.13.2"
|
||||
kotlin = "2.0.21"
|
||||
kotlin = "2.2.0"
|
||||
coreKtx = "1.17.0"
|
||||
junit = "4.13.2"
|
||||
junitVersion = "1.3.0"
|
||||
|
||||
Reference in New Issue
Block a user