Skip to content
Open
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension

Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
2 changes: 1 addition & 1 deletion app/build.gradle.kts
Original file line number Diff line number Diff line change
Expand Up @@ -111,7 +111,7 @@ dependencies {
// MediaPipe GenAI for offline inference (LLM)
implementation("com.google.mediapipe:tasks-genai:0.10.32")
// LiteRT-LM for newer offline .litertlm models (e.g. Gemma 4 E4B it)
implementation("com.google.ai.edge.litertlm:litertlm-android:0.0.0-alpha06")
implementation("com.google.ai.edge.litertlm:litertlm-android:0.10.0")

// Camera Core to potentially fix missing JNI lib issue
implementation("androidx.camera:camera-core:1.4.0")
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -337,15 +337,30 @@ class PhotoReasoningViewModel(
if (!isLiteRtAbiSupported()) {
return "Gemma 4 offline is only supported on arm64-v8a or x86_64 devices."
}
Log.i(
TAG,
"Initializing Gemma 4 LiteRT engine. preferredBackend=$backend, " +
"abis=${Build.SUPPORTED_ABIS?.joinToString() ?: "unknown"}, " +
"modelPath=${modelFile.absolutePath}, modelSizeBytes=${modelFile.length()}"
)
if (liteRtEngine == null) {
val liteRtBackend = if (backend == InferenceBackend.GPU) Backend.GPU else Backend.CPU
val engineConfig = EngineConfig(
val preferredBackend = if (backend == InferenceBackend.GPU) Backend.GPU() else Backend.CPU()
val preferredVisionBackend = if (currentModel.supportsScreenshot) Backend.GPU() else null
val audioBackend = null
val cacheDir =
if (modelFile.absolutePath.startsWith("/data/local/tmp")) {
context.getExternalFilesDir(null)?.absolutePath
} else {
null
}
liteRtEngine = createLiteRtEngineWithFallbacks(
modelPath = modelFile.absolutePath,
backend = liteRtBackend,
cacheDir = context.cacheDir.absolutePath
preferredBackend = preferredBackend,
preferredVisionBackend = preferredVisionBackend,
audioBackend = audioBackend,
cacheDir = cacheDir
)
liteRtEngine = Engine(engineConfig).also { it.initialize() }
Log.d(TAG, "Offline model initialized with LiteRT-LM Engine backend=$backend")
Log.d(TAG, "Offline model initialized with LiteRT-LM Engine")
}
} else {
if (llmInference == null) {
Expand Down Expand Up @@ -373,6 +388,12 @@ class PhotoReasoningViewModel(
return null // Already initialized or no model file
} catch (e: Exception) {
Log.e(TAG, "Failed to initialize offline model", e)
Log.e(
TAG,
"Offline init context: model=${com.google.ai.sample.GenerativeAiViewModelFactory.getCurrentModel()}, " +
"preferredBackend=${GenerativeAiViewModelFactory.getBackend()}, " +
"abis=${Build.SUPPORTED_ABIS?.joinToString() ?: "unknown"}"
)
val msg = e.message ?: e.toString()
if (msg.contains("nativeCheckLoaded", ignoreCase = true) ||
msg.contains("No implementation found", ignoreCase = true) ||
Expand All @@ -392,6 +413,63 @@ class PhotoReasoningViewModel(
val supportedAbis = Build.SUPPORTED_ABIS?.toSet().orEmpty()
return supportedAbis.contains("arm64-v8a") || supportedAbis.contains("x86_64")
}

/**
 * Creates and initializes a LiteRT [Engine] for the given model, walking a fixed
 * fallback chain of backend / vision-backend combinations until one initializes.
 *
 * Order tried: the caller's preferred pair first, then CPU with the preferred
 * vision backend, then CPU/CPU, then GPU/CPU. Duplicate pairs are collapsed by
 * the linked set, so the actual number of attempts may be smaller.
 *
 * @param modelPath absolute path of the .litertlm model file on disk.
 * @param preferredBackend backend requested by the caller (tried first).
 * @param preferredVisionBackend vision backend to pair with the first two attempts,
 *        or null when the model has no vision support.
 * @param audioBackend audio backend passed through unchanged to every attempt.
 * @param cacheDir optional cache directory handed to [EngineConfig].
 * @return the first [Engine] that initializes successfully.
 * @throws IllegalStateException when every attempt fails; the message aggregates
 *         per-attempt diagnostics and the cause is the last exception seen.
 */
private fun createLiteRtEngineWithFallbacks(
    modelPath: String,
    preferredBackend: Backend,
    preferredVisionBackend: Backend?,
    audioBackend: Backend?,
    cacheDir: String?
): Engine {
    val cpu = Backend.CPU()
    val gpu = Backend.GPU()
    // Ordered, de-duplicated fallback chain: caller preference first, CPU variants next.
    val candidates = linkedSetOf(
        preferredBackend to preferredVisionBackend,
        cpu to preferredVisionBackend,
        cpu to cpu,
        gpu to cpu
    )

    var mostRecentError: Exception? = null
    val diagnostics = StringBuilder()

    for ((attemptIndex, candidate) in candidates.withIndex()) {
        val (candidateBackend, candidateVision) = candidate
        try {
            Log.i(
                TAG,
                "LiteRT init attempt ${attemptIndex + 1}/${candidates.size}: " +
                    "backend=$candidateBackend visionBackend=$candidateVision audioBackend=$audioBackend cacheDir=$cacheDir"
            )
            val engineConfig = EngineConfig(
                modelPath = modelPath,
                backend = candidateBackend,
                visionBackend = candidateVision,
                audioBackend = audioBackend,
                maxNumTokens = null,
                cacheDir = cacheDir
            )
            val engine = Engine(engineConfig)
            engine.initialize()
            return engine
        } catch (e: Exception) {
            // Record the failure and keep walking the chain; details feed the final error.
            mostRecentError = e
            val reason = e.message ?: e.toString()
            diagnostics.append(
                "Attempt ${attemptIndex + 1} failed (backend=$candidateBackend, visionBackend=$candidateVision): $reason\n"
            )
            Log.w(TAG, "LiteRT init attempt ${attemptIndex + 1} failed", e)
        }
    }

    throw IllegalStateException(
        "All LiteRT initialization attempts failed.\n$diagnostics",
        mostRecentError
    )
}

fun reinitializeOfflineModel(context: Context) {
viewModelScope.launch(Dispatchers.IO) {
Expand Down
2 changes: 1 addition & 1 deletion build.gradle.kts
Original file line number Diff line number Diff line change
Expand Up @@ -20,7 +20,7 @@ buildscript {
}
// Top-level build file where you can add configuration options common to all sub-projects/modules.
plugins {
id("com.android.application") version "8.1.3" apply false
id("com.android.application") version "8.8.2" apply false
id("org.jetbrains.kotlin.android") version "1.9.20" apply false
id("com.google.android.libraries.mapsplatform.secrets-gradle-plugin") version "2.0.1" apply false
id("com.google.gms.google-services") version "4.4.2" apply false
Expand Down
2 changes: 1 addition & 1 deletion gradle/wrapper/gradle-wrapper.properties
Original file line number Diff line number Diff line change
@@ -1,5 +1,5 @@
distributionBase=GRADLE_USER_HOME
distributionPath=wrapper/dists
distributionUrl=https\://services.gradle.org/distributions/gradle-8.4-bin.zip
distributionUrl=https\://services.gradle.org/distributions/gradle-8.10.2-bin.zip
zipStoreBase=GRADLE_USER_HOME
zipStorePath=wrapper/dists
Loading