Decided to stick with the Ollama LLM backend for now rather than taking on the full C++ download-and-compile process required for MLC LLM. Updated the UI to include model downloading for the Llamatik backend. Tested as working: models download successfully.

This commit is contained in:
2026-02-28 12:58:04 +11:00
parent 66e4b04ed8
commit 93a2c48e4b
7 changed files with 357 additions and 6 deletions

View File

@@ -3,7 +3,7 @@ plugins {
alias(libs.plugins.kotlin.android)
alias(libs.plugins.kotlin.compose)
id("com.chaquo.python") // Apply it here
id("kotlin-kapt") // Added for the Room Android database subsystem and libraries
id("com.google.devtools.ksp") // Added for the Room Android database subsystem and libraries
}
chaquopy {
@@ -84,5 +84,11 @@ dependencies {
// Room Database for local chat history
implementation("androidx.room:room-runtime:2.6.1")
implementation("androidx.room:room-ktx:2.6.1")
kapt("androidx.room:room-compiler:2.6.1")
ksp("androidx.room:room-compiler:2.6.1")
// Llama.cpp Kotlin Multiplatform Wrapper
implementation("com.llamatik:library:0.8.1")
// Extended Material Icons (for Download, CheckCircle, etc.)
implementation("androidx.compose.material:material-icons-extended")
}