Skip to content
Closed
Original file line number Diff line number Diff line change
Expand Up @@ -57,6 +57,7 @@ import com.google.ai.sample.ApiProvider
import com.google.ai.edge.litertlm.Backend
import com.google.ai.edge.litertlm.Engine
import com.google.ai.edge.litertlm.EngineConfig
import com.google.ai.edge.litertlm.NativeLibraryLoader
import com.google.mediapipe.tasks.genai.llminference.LlmInference
import okhttp3.MediaType.Companion.toMediaType
import okhttp3.OkHttpClient
Expand Down Expand Up @@ -86,6 +87,7 @@ class PhotoReasoningViewModel(

private var llmInference: LlmInference? = null
private var liteRtEngine: Engine? = null
private var liteRtNativeLoaded = false
private val TAG = "PhotoReasoningViewModel"

// WebRTC & Signaling
Expand Down Expand Up @@ -337,6 +339,7 @@ class PhotoReasoningViewModel(
if (!isLiteRtAbiSupported()) {
return "Gemma 4 offline is only supported on arm64-v8a or x86_64 devices."
}
ensureLiteRtNativeLoaded()
if (liteRtEngine == null) {
val liteRtBackend = if (backend == InferenceBackend.GPU) Backend.GPU else Backend.CPU
val engineConfig = EngineConfig(
Expand Down Expand Up @@ -388,6 +391,18 @@ class PhotoReasoningViewModel(
}
}

/**
 * Loads the LiteRT-LM native library exactly once per ViewModel instance.
 *
 * Tries the library's own [NativeLibraryLoader] first; if that throws for any
 * reason, falls back to loading the JNI shared object directly. If both
 * attempts fail, the fallback's error propagates to the caller. On success the
 * flag is set so subsequent calls return immediately.
 */
private fun ensureLiteRtNativeLoaded() {
    if (liteRtNativeLoaded) return

    try {
        // Preferred path: let the LiteRT-LM loader resolve and load its natives.
        NativeLibraryLoader.INSTANCE.load()
    } catch (primaryFailure: Throwable) {
        // Fallback: load the JNI library directly; if this also fails,
        // the resulting error is thrown to the caller (matching getOrThrow()).
        System.loadLibrary("litertlm_jni")
    }

    liteRtNativeLoaded = true
}

private fun isLiteRtAbiSupported(): Boolean {
val supportedAbis = Build.SUPPORTED_ABIS?.toSet().orEmpty()
return supportedAbis.contains("arm64-v8a") || supportedAbis.contains("x86_64")
Expand Down
Loading