Skip to content

Commit 994f4d2

Browse files
Add Qwen3.5 4B offline model package support
1 parent e723057 commit 994f4d2

3 files changed

Lines changed: 155 additions & 52 deletions

File tree

app/src/main/kotlin/com/google/ai/sample/GenerativeAiViewModelFactory.kt

Lines changed: 26 additions & 3 deletions
Original file line numberDiff line numberDiff line change
@@ -29,7 +29,9 @@ enum class ModelOption(
2929
val size: String? = null,
3030
val supportsScreenshot: Boolean = true,
3131
val isOfflineModel: Boolean = false,
32-
val offlineModelFilename: String? = null
32+
val offlineModelFilename: String? = null,
33+
val offlineRequiredFilenames: List<String> = emptyList(),
34+
val additionalDownloadUrls: List<String> = emptyList()
3335
) {
3436
PUTER_GLM5("GLM-5V Turbo (Puter)", "openrouter:z-ai/glm-5v-turbo", ApiProvider.PUTER, supportsScreenshot = true),
3537
MISTRAL_LARGE_3("Mistral Large 3", "mistral-large-latest", ApiProvider.MISTRAL),
@@ -53,15 +55,36 @@ enum class ModelOption(
5355
"https://huggingface.co/na5h13/gemma-3n-E4B-it-litert-lm/resolve/main/gemma-3n-E4B-it-int4.litertlm?download=true",
5456
"4.92 GB",
5557
isOfflineModel = true,
56-
offlineModelFilename = "gemma-3n-e4b-it-int4.litertlm"
58+
offlineModelFilename = "gemma-3n-e4b-it-int4.litertlm",
59+
offlineRequiredFilenames = listOf("gemma-3n-e4b-it-int4.litertlm")
5760
),
5861
GEMMA_4_E4B_IT(
5962
"Gemma 4 E4B it (offline)",
6063
"gemma-4-e4b-it",
6164
ApiProvider.GOOGLE,
6265
"https://huggingface.co/litert-community/gemma-4-E4B-it-litert-lm/resolve/main/gemma-4-E4B-it.litertlm?download=true",
66+
"3.40 GB",
6367
isOfflineModel = true,
64-
offlineModelFilename = "gemma-4-E4B-it.litertlm"
68+
offlineModelFilename = "gemma-4-E4B-it.litertlm",
69+
offlineRequiredFilenames = listOf("gemma-4-E4B-it.litertlm")
70+
),
71+
QWEN3_5_4B_OFFLINE(
72+
"Qwen3.5 4B (offline)",
73+
"qwen3.5-4b-offline",
74+
ApiProvider.GOOGLE,
75+
"https://huggingface.co/Yoursmiling/Qwen3.5-4B-LiteRT/resolve/main/model_quantized.litertlm?download=true",
76+
"4.27 GB",
77+
isOfflineModel = true,
78+
offlineModelFilename = "model_quantized.litertlm",
79+
offlineRequiredFilenames = listOf(
80+
"model_quantized.litertlm",
81+
"sentencepiece.model"
82+
),
83+
additionalDownloadUrls = listOf(
84+
"https://huggingface.co/Yoursmiling/Qwen3.5-4B-LiteRT/resolve/main/sentencepiece.model?download=true",
85+
"https://huggingface.co/Yoursmiling/Qwen3.5-4B-LiteRT/resolve/main/tokenizer.json?download=true",
86+
"https://huggingface.co/Yoursmiling/Qwen3.5-4B-LiteRT/resolve/main/tokenizer_config.json?download=true"
87+
)
6588
),
6689
HUMAN_EXPERT("Human Expert", "human-expert", ApiProvider.HUMAN_EXPERT);
6790

app/src/main/kotlin/com/google/ai/sample/feature/multimodal/ModelDownloadManager.kt

Lines changed: 116 additions & 42 deletions
Original file line numberDiff line numberDiff line change
@@ -58,9 +58,16 @@ object ModelDownloadManager {
5858
private var downloadJob: Job? = null
5959
private var isPaused = false
6060

61+
private data class DownloadTarget(
62+
val finalFile: File,
63+
val tempFile: File,
64+
val url: String,
65+
val label: String
66+
)
67+
6168
fun isModelDownloaded(context: Context, model: ModelOption = GenerativeAiViewModelFactory.getCurrentModel()): Boolean {
62-
val file = getModelFile(context, model)
63-
return file != null && file.exists() && file.length() > 0
69+
val required = getRequiredFiles(context, model)
70+
return required.isNotEmpty() && required.all { it.exists() && it.length() > 0 }
6471
}
6572

6673
fun getModelFile(context: Context, model: ModelOption = GenerativeAiViewModelFactory.getCurrentModel()): File? {
@@ -74,13 +81,26 @@ object ModelDownloadManager {
7481
}
7582
}
7683

77-
private fun getTempFile(context: Context, model: ModelOption): File? {
78-
val modelFilename = model.offlineModelFilename ?: return null
79-
val externalFilesDir = context.getExternalFilesDir(null)
80-
return if (externalFilesDir != null) {
81-
File(externalFilesDir, modelFilename + TEMP_SUFFIX)
84+
private fun getRequiredFiles(context: Context, model: ModelOption): List<File> {
85+
val externalFilesDir = context.getExternalFilesDir(null) ?: return emptyList()
86+
val requiredNames = if (model.offlineRequiredFilenames.isNotEmpty()) {
87+
model.offlineRequiredFilenames
8288
} else {
83-
null
89+
listOfNotNull(model.offlineModelFilename)
90+
}
91+
return requiredNames.map { File(externalFilesDir, it) }
92+
}
93+
94+
fun getMissingRequiredFiles(context: Context, model: ModelOption): List<String> {
95+
val externalFilesDir = context.getExternalFilesDir(null) ?: return model.offlineRequiredFilenames
96+
val requiredNames = if (model.offlineRequiredFilenames.isNotEmpty()) {
97+
model.offlineRequiredFilenames
98+
} else {
99+
listOfNotNull(model.offlineModelFilename)
100+
}
101+
return requiredNames.filter { name ->
102+
val f = File(externalFilesDir, name)
103+
!f.exists() || f.length() <= 0
84104
}
85105
}
86106

@@ -147,7 +167,7 @@ object ModelDownloadManager {
147167

148168
isPaused = false
149169
downloadJob = CoroutineScope(Dispatchers.IO).launch {
150-
downloadWithResume(context, model, url)
170+
downloadModelPackage(context, model, url)
151171
}
152172
}
153173

@@ -164,7 +184,7 @@ object ModelDownloadManager {
164184

165185
isPaused = false
166186
downloadJob = CoroutineScope(Dispatchers.IO).launch {
167-
downloadWithResume(context, model, url)
187+
downloadModelPackage(context, model, url)
168188
}
169189
}
170190

@@ -174,11 +194,16 @@ object ModelDownloadManager {
174194
downloadJob?.cancel()
175195
downloadJob = null
176196

177-
// Delete temp file
178-
val tempFile = getTempFile(context, model)
179-
if (tempFile != null && tempFile.exists()) {
180-
tempFile.delete()
181-
Log.d(TAG, "Temp file deleted.")
197+
// Delete temp files for full package
198+
val externalFilesDir = context.getExternalFilesDir(null)
199+
if (externalFilesDir != null) {
200+
val targets = buildDownloadTargets(context, model, model.downloadUrl ?: "")
201+
targets.forEach { target ->
202+
if (target.tempFile.exists()) {
203+
target.tempFile.delete()
204+
}
205+
}
206+
Log.d(TAG, "Temporary package files deleted.")
182207
}
183208

184209
_downloadState.value = DownloadState.Idle
@@ -188,21 +213,79 @@ object ModelDownloadManager {
188213
}
189214
}
190215

191-
private suspend fun downloadWithResume(context: Context, model: ModelOption, url: String) {
192-
val tempFile = getTempFile(context, model) ?: run {
216+
private suspend fun downloadModelPackage(context: Context, model: ModelOption, primaryUrl: String) {
217+
val targets = buildDownloadTargets(context, model, primaryUrl)
218+
if (targets.isEmpty()) {
193219
_downloadState.value = DownloadState.Error("Storage not available.")
194220
return
195221
}
196-
val finalFile = getModelFile(context, model) ?: run {
197-
_downloadState.value = DownloadState.Error("Storage not available.")
198-
return
222+
223+
for ((index, target) in targets.withIndex()) {
224+
if (!coroutineContext.isActive) return
225+
Log.i(TAG, "Downloading package file ${index + 1}/${targets.size}: ${target.label}")
226+
val error = downloadSingleFileWithResume(context, target, index, targets.size)
227+
if (error != null) {
228+
_downloadState.value = DownloadState.Error(error)
229+
cancelDownloadNotification(context)
230+
return
231+
}
232+
}
233+
234+
_downloadState.value = DownloadState.Completed
235+
showDownloadCompleteNotification(context)
236+
withContext(Dispatchers.Main) {
237+
Toast.makeText(context, "Model download complete!", Toast.LENGTH_SHORT).show()
238+
}
239+
}
240+
241+
private fun buildDownloadTargets(context: Context, model: ModelOption, primaryUrl: String): List<DownloadTarget> {
242+
val externalFilesDir = context.getExternalFilesDir(null) ?: return emptyList()
243+
val primaryFilename = model.offlineModelFilename ?: return emptyList()
244+
val urls = listOf(primaryUrl) + model.additionalDownloadUrls
245+
val filenames = urls.mapIndexedNotNull { idx, url ->
246+
if (idx == 0) primaryFilename else filenameFromUrl(url)
247+
}
248+
if (urls.size != filenames.size) {
249+
Log.e(TAG, "Could not resolve filename for at least one download URL.")
250+
return emptyList()
251+
}
252+
return urls.zip(filenames).map { (url, filename) ->
253+
val finalFile = File(externalFilesDir, filename)
254+
DownloadTarget(
255+
finalFile = finalFile,
256+
tempFile = File(externalFilesDir, "$filename$TEMP_SUFFIX"),
257+
url = url,
258+
label = filename
259+
)
260+
}
261+
}
262+
263+
private fun filenameFromUrl(url: String): String? {
264+
val clean = url.substringBefore('?')
265+
val slash = clean.lastIndexOf('/')
266+
return if (slash >= 0 && slash + 1 < clean.length) clean.substring(slash + 1) else null
267+
}
268+
269+
private suspend fun downloadSingleFileWithResume(
270+
context: Context,
271+
target: DownloadTarget,
272+
fileIndex: Int,
273+
fileCount: Int
274+
): String? {
275+
val tempFile = target.tempFile
276+
val finalFile = target.finalFile
277+
val url = target.url
278+
279+
if (finalFile.exists() && finalFile.length() > 0L) {
280+
Log.d(TAG, "Skipping already downloaded file: ${target.label}")
281+
return null
199282
}
200283

201284
var retryCount = 0
202285
var bytesDownloaded = if (tempFile.exists()) tempFile.length() else 0L
203286

204287
while (retryCount <= MAX_RETRIES) {
205-
if (!coroutineContext.isActive) return // Coroutine was cancelled
288+
if (!coroutineContext.isActive) return null // Coroutine was cancelled
206289

207290
var connection: HttpURLConnection? = null
208291
try {
@@ -240,9 +323,7 @@ object ModelDownloadManager {
240323
}
241324
}
242325
else -> {
243-
_downloadState.value = DownloadState.Error("Server error: $responseCode")
244-
cancelDownloadNotification(context)
245-
return
326+
return "Server error for ${target.label}: $responseCode"
246327
}
247328
}
248329

@@ -264,7 +345,7 @@ object ModelDownloadManager {
264345
if (!coroutineContext.isActive) {
265346
Log.d(TAG, "Download cancelled during read.")
266347
cancelDownloadNotification(context)
267-
return
348+
return null
268349
}
269350

270351
if (isPaused) {
@@ -275,7 +356,7 @@ object ModelDownloadManager {
275356
)
276357
// Keep notification showing paused state
277358
showDownloadNotification(context, bytesDownloaded.toFloat() / totalBytes, bytesDownloaded, totalBytes)
278-
return
359+
return null
279360
}
280361

281362
output.write(buffer, 0, bytesRead)
@@ -286,13 +367,14 @@ object ModelDownloadManager {
286367
if (now - lastProgressUpdate >= PROGRESS_UPDATE_INTERVAL_MS) {
287368
lastProgressUpdate = now
288369
val progress = if (totalBytes > 0) bytesDownloaded.toFloat() / totalBytes else 0f
370+
val aggregateProgress = (fileIndex + progress) / fileCount.toFloat()
289371
_downloadState.value = DownloadState.Downloading(
290-
progress = progress,
372+
progress = aggregateProgress,
291373
bytesDownloaded = bytesDownloaded,
292374
totalBytes = totalBytes
293375
)
294376
// Point 18: Update notification with progress
295-
showDownloadNotification(context, progress, bytesDownloaded, totalBytes)
377+
showDownloadNotification(context, aggregateProgress, bytesDownloaded, totalBytes)
296378
}
297379
}
298380
}
@@ -303,30 +385,20 @@ object ModelDownloadManager {
303385
finalFile.delete()
304386
if (tempFile.renameTo(finalFile)) {
305387
Log.i(TAG, "Download complete! File: ${finalFile.absolutePath} (${finalFile.length()} bytes)")
306-
_downloadState.value = DownloadState.Completed
307-
showDownloadCompleteNotification(context)
308-
withContext(Dispatchers.Main) {
309-
Toast.makeText(context, "Model download complete!", Toast.LENGTH_SHORT).show()
310-
}
311388
} else {
312-
_downloadState.value = DownloadState.Error("Failed to save model file.")
313-
cancelDownloadNotification(context)
389+
return "Failed to save ${target.label}."
314390
}
315391
}
316-
return // Success, exit retry loop
392+
return null // Success, exit retry loop
317393

318394
} catch (e: IOException) {
319395
Log.e(TAG, "Download error (attempt ${retryCount + 1}): ${e.message}")
320396
retryCount++
321397
if (retryCount > MAX_RETRIES) {
322-
_downloadState.value = DownloadState.Error("Download failed after $MAX_RETRIES retries: ${e.message}")
323-
cancelDownloadNotification(context)
324-
withContext(Dispatchers.Main) {
325-
Toast.makeText(context, "Download failed: ${e.message}", Toast.LENGTH_LONG).show()
326-
}
398+
return "Download failed for ${target.label} after $MAX_RETRIES retries: ${e.message}"
327399
} else {
328400
_downloadState.value = DownloadState.Downloading(
329-
progress = if (bytesDownloaded > 0) 0f else 0f,
401+
progress = fileIndex.toFloat() / fileCount.toFloat(),
330402
bytesDownloaded = bytesDownloaded,
331403
totalBytes = -1
332404
)
@@ -337,6 +409,8 @@ object ModelDownloadManager {
337409
connection?.disconnect()
338410
}
339411
}
412+
413+
return "Download failed for ${target.label}."
340414
}
341415

342416
/**

app/src/main/kotlin/com/google/ai/sample/feature/multimodal/PhotoReasoningViewModel.kt

Lines changed: 13 additions & 7 deletions
Original file line numberDiff line numberDiff line change
@@ -327,19 +327,24 @@ class PhotoReasoningViewModel(
327327
private fun initializeOfflineModel(context: Context): String? {
328328
try {
329329
val currentModel = com.google.ai.sample.GenerativeAiViewModelFactory.getCurrentModel()
330+
val missingFiles = ModelDownloadManager.getMissingRequiredFiles(context, currentModel)
331+
if (missingFiles.isNotEmpty()) {
332+
return "Offline model files missing: ${missingFiles.joinToString(", ")}. Please redownload the model package."
333+
}
330334
val modelFile = ModelDownloadManager.getModelFile(context, currentModel)
331335
if (modelFile != null && modelFile.exists()) {
332336
// Load backend preference
333337
GenerativeAiViewModelFactory.loadBackendPreference(context)
334338
val backend = GenerativeAiViewModelFactory.getBackend()
339+
val isLiteRtModel = currentModel.offlineModelFilename?.endsWith(".litertlm", ignoreCase = true) == true
335340

336-
if (currentModel == ModelOption.GEMMA_4_E4B_IT) {
341+
if (isLiteRtModel) {
337342
if (!isLiteRtAbiSupported()) {
338-
return "Gemma 4 offline is only supported on arm64-v8a or x86_64 devices."
343+
return "Offline LiteRT models are only supported on arm64-v8a or x86_64 devices."
339344
}
340345
Log.i(
341346
TAG,
342-
"Initializing Gemma 4 LiteRT engine. preferredBackend=$backend, " +
347+
"Initializing LiteRT engine for ${currentModel.displayName}. preferredBackend=$backend, " +
343348
"abis=${Build.SUPPORTED_ABIS?.joinToString() ?: "unknown"}, " +
344349
"modelPath=${modelFile.absolutePath}, modelSizeBytes=${modelFile.length()}"
345350
)
@@ -835,7 +840,8 @@ class PhotoReasoningViewModel(
835840
// Initialize model if needed
836841
var initError: String? = null
837842
val selectedOfflineModel = GenerativeAiViewModelFactory.getCurrentModel()
838-
if (selectedOfflineModel == ModelOption.GEMMA_4_E4B_IT) {
843+
val useLiteRt = selectedOfflineModel.offlineModelFilename?.endsWith(".litertlm", ignoreCase = true) == true
844+
if (useLiteRt) {
839845
if (liteRtEngine == null) {
840846
withContext(Dispatchers.Main) {
841847
replaceAiMessageText("Initializing offline model...", isPending = true)
@@ -860,7 +866,7 @@ class PhotoReasoningViewModel(
860866
_isInitializingOfflineModelFlow.value = false
861867
}
862868

863-
if (selectedOfflineModel == ModelOption.GEMMA_4_E4B_IT && liteRtEngine == null) {
869+
if (useLiteRt && liteRtEngine == null) {
864870
val errorMsg = initError ?: "Offline model could not be initialized."
865871
withContext(Dispatchers.Main) {
866872
_uiState.value = PhotoReasoningUiState.Error(errorMsg)
@@ -875,7 +881,7 @@ class PhotoReasoningViewModel(
875881
refreshStopButtonState()
876882
}
877883
return@launch
878-
} else if (selectedOfflineModel != ModelOption.GEMMA_4_E4B_IT && llmInference == null) {
884+
} else if (!useLiteRt && llmInference == null) {
879885
val errorMsg = initError ?: "Offline model could not be initialized."
880886
withContext(Dispatchers.Main) {
881887
_uiState.value = PhotoReasoningUiState.Error(errorMsg)
@@ -896,7 +902,7 @@ class PhotoReasoningViewModel(
896902

897903
Log.d(TAG, "Sending streaming prompt to offline model (length: ${fullPrompt.length})")
898904

899-
val finalResponse = if (selectedOfflineModel == ModelOption.GEMMA_4_E4B_IT) {
905+
val finalResponse = if (useLiteRt) {
900906
val engine = liteRtEngine
901907
if (engine == null) {
902908
withContext(Dispatchers.Main) {

0 commit comments

Comments (0)