diff --git a/app/build.gradle.kts b/app/build.gradle.kts
index 7ee2a336..4df88cdc 100644
--- a/app/build.gradle.kts
+++ b/app/build.gradle.kts
@@ -111,7 +111,7 @@ dependencies {
     // MediaPipe GenAI for offline inference (LLM)
     implementation("com.google.mediapipe:tasks-genai:0.10.32")
     // LiteRT-LM for newer offline .litertlm models (e.g. Gemma 4 E4B it)
-    implementation("com.google.ai.edge.litertlm:litertlm-android:0.0.0-alpha06")
+    implementation("com.google.ai.edge.litertlm:litertlm-android:0.10.0")
     // Camera Core to potentially fix missing JNI lib issue
     implementation("androidx.camera:camera-core:1.4.0")

diff --git a/app/src/main/kotlin/com/google/ai/sample/feature/multimodal/PhotoReasoningViewModel.kt b/app/src/main/kotlin/com/google/ai/sample/feature/multimodal/PhotoReasoningViewModel.kt
index d816929a..a059839e 100644
--- a/app/src/main/kotlin/com/google/ai/sample/feature/multimodal/PhotoReasoningViewModel.kt
+++ b/app/src/main/kotlin/com/google/ai/sample/feature/multimodal/PhotoReasoningViewModel.kt
@@ -337,15 +337,30 @@ class PhotoReasoningViewModel(
            if (!isLiteRtAbiSupported()) {
                return "Gemma 4 offline is only supported on arm64-v8a or x86_64 devices."
            }
+            Log.i(
+                TAG,
+                "Initializing Gemma 4 LiteRT engine. preferredBackend=$backend, " +
+                    "abis=${Build.SUPPORTED_ABIS?.joinToString() ?: "unknown"}, " +
+                    "modelPath=${modelFile.absolutePath}, modelSizeBytes=${modelFile.length()}"
+            )
            if (liteRtEngine == null) {
-                val liteRtBackend = if (backend == InferenceBackend.GPU) Backend.GPU else Backend.CPU
-                val engineConfig = EngineConfig(
+                val preferredBackend = if (backend == InferenceBackend.GPU) Backend.GPU() else Backend.CPU()
+                val preferredVisionBackend = if (currentModel.supportsScreenshot) Backend.GPU() else null
+                val audioBackend = null
+                val cacheDir =
+                    if (modelFile.absolutePath.startsWith("/data/local/tmp")) {
+                        context.getExternalFilesDir(null)?.absolutePath
+                    } else {
+                        null
+                    }
+                liteRtEngine = createLiteRtEngineWithFallbacks(
                    modelPath = modelFile.absolutePath,
-                    backend = liteRtBackend,
-                    cacheDir = context.cacheDir.absolutePath
+                    preferredBackend = preferredBackend,
+                    preferredVisionBackend = preferredVisionBackend,
+                    audioBackend = audioBackend,
+                    cacheDir = cacheDir
                )
-                liteRtEngine = Engine(engineConfig).also { it.initialize() }
-                Log.d(TAG, "Offline model initialized with LiteRT-LM Engine backend=$backend")
+                Log.d(TAG, "Offline model initialized with LiteRT-LM Engine")
            }
        } else {
            if (llmInference == null) {
@@ -373,6 +388,12 @@ class PhotoReasoningViewModel(
            return null // Already initialized or no model file
        } catch (e: Exception) {
            Log.e(TAG, "Failed to initialize offline model", e)
+            Log.e(
+                TAG,
+                "Offline init context: model=${com.google.ai.sample.GenerativeAiViewModelFactory.getCurrentModel()}, " +
+                    "preferredBackend=${GenerativeAiViewModelFactory.getBackend()}, " +
+                    "abis=${Build.SUPPORTED_ABIS?.joinToString() ?: "unknown"}"
+            )
            val msg = e.message ?: e.toString()
            if (msg.contains("nativeCheckLoaded", ignoreCase = true) ||
                msg.contains("No implementation found", ignoreCase = true) ||
@@ -392,6 +413,63 @@
        val supportedAbis = Build.SUPPORTED_ABIS?.toSet().orEmpty()
        return supportedAbis.contains("arm64-v8a") ||
            supportedAbis.contains("x86_64")
    }
+
+    private fun createLiteRtEngineWithFallbacks(
+        modelPath: String,
+        preferredBackend: Backend,
+        preferredVisionBackend: Backend?,
+        audioBackend: Backend?,
+        cacheDir: String?
+    ): Engine {
+        val cpuBackend = Backend.CPU()
+        val gpuBackend = Backend.GPU()
+        val attempts = linkedSetOf(
+            preferredBackend to preferredVisionBackend,
+            cpuBackend to preferredVisionBackend,
+            cpuBackend to cpuBackend,
+            gpuBackend to cpuBackend
+        )
+        var lastError: Exception? = null
+        val failureDetails = StringBuilder()
+
+        attempts.forEachIndexed { index, (backendAttempt, visionAttempt) ->
+            try {
+                Log.i(
+                    TAG,
+                    "LiteRT init attempt ${index + 1}/${attempts.size}: " +
+                        "backend=$backendAttempt visionBackend=$visionAttempt audioBackend=$audioBackend cacheDir=$cacheDir"
+                )
+                val config = EngineConfig(
+                    modelPath = modelPath,
+                    backend = backendAttempt,
+                    visionBackend = visionAttempt,
+                    audioBackend = audioBackend,
+                    maxNumTokens = null,
+                    cacheDir = cacheDir
+                )
+                return Engine(config).also { it.initialize() }
+            } catch (e: Exception) {
+                lastError = e
+                val msg = e.message ?: e.toString()
+                failureDetails
+                    .append("Attempt ")
+                    .append(index + 1)
+                    .append(" failed (backend=")
+                    .append(backendAttempt)
+                    .append(", visionBackend=")
+                    .append(visionAttempt)
+                    .append("): ")
+                    .append(msg)
+                    .append('\n')
+                Log.w(TAG, "LiteRT init attempt ${index + 1} failed", e)
+            }
+        }
+
+        throw IllegalStateException(
+            "All LiteRT initialization attempts failed.\n$failureDetails",
+            lastError
+        )
+    }
    fun reinitializeOfflineModel(context: Context) {
        viewModelScope.launch(Dispatchers.IO) {
diff --git a/build.gradle.kts b/build.gradle.kts
index a4a00dd5..d134f4e7 100644
--- a/build.gradle.kts
+++ b/build.gradle.kts
@@ -20,7 +20,7 @@ buildscript {
 }
 // Top-level build file where you can add configuration options common to all sub-projects/modules.
 plugins {
-    id("com.android.application") version "8.1.3" apply false
+    id("com.android.application") version "8.8.2" apply false
     id("org.jetbrains.kotlin.android") version "1.9.20" apply false
     id("com.google.android.libraries.mapsplatform.secrets-gradle-plugin") version "2.0.1" apply false
     id("com.google.gms.google-services") version "4.4.2" apply false
diff --git a/gradle/wrapper/gradle-wrapper.properties b/gradle/wrapper/gradle-wrapper.properties
index e411586a..1e2fbf0d 100644
--- a/gradle/wrapper/gradle-wrapper.properties
+++ b/gradle/wrapper/gradle-wrapper.properties
@@ -1,5 +1,5 @@
 distributionBase=GRADLE_USER_HOME
 distributionPath=wrapper/dists
-distributionUrl=https\://services.gradle.org/distributions/gradle-8.4-bin.zip
+distributionUrl=https\://services.gradle.org/distributions/gradle-8.10.2-bin.zip
 zipStoreBase=GRADLE_USER_HOME
 zipStorePath=wrapper/dists