UI: show a Snackbar to warn the user that the system prompt is not always supported

Han Yin 2025-04-20 19:25:33 -07:00
parent 56a7272858
commit d1b018e375
3 changed files with 15 additions and 0 deletions
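
For context, the ScaffoldEvent type referenced in the diff below is not part of this commit. A minimal sketch of what the imported ScaffoldEvent.ShowSnackbar event could look like, assuming a sealed hierarchy under ui.components (only the message parameter is confirmed by this diff):

    // Sketch only: the real ScaffoldEvent definition is not shown in this commit.
    sealed interface ScaffoldEvent {
        // Ask the app-level scaffold to display a transient snackbar.
        data class ShowSnackbar(val message: String) : ScaffoldEvent
    }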


@@ -306,6 +306,7 @@ fun AppContent(
         // Mode Selection Screen
         composable(AppDestinations.MODEL_LOADING_ROUTE) {
             ModelLoadingScreen(
+                onScaffoldEvent = handleScaffoldEvent,
                 onNavigateBack = { navigationActions.navigateUp() },
                 onNavigateToBenchmark = { navigationActions.navigateToBenchmark(it) },
                 onNavigateToConversation = { navigationActions.navigateToConversation(it) },
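
The handleScaffoldEvent lambda wired in above is defined elsewhere in AppContent and is not shown in this hunk. A minimal sketch of how such a handler might forward ShowSnackbar events to the scaffold's SnackbarHostState; everything here except ScaffoldEvent.ShowSnackbar is an assumption:

    // Sketch only: assumes AppContent owns the Material3 Scaffold's SnackbarHostState.
    val snackbarHostState = remember { SnackbarHostState() }   // androidx.compose.material3
    val scope = rememberCoroutineScope()
    val handleScaffoldEvent: (ScaffoldEvent) -> Unit = { event ->
        when (event) {
            is ScaffoldEvent.ShowSnackbar ->
                // showSnackbar is a suspend function, so launch it on the composition scope.
                scope.launch { snackbarHostState.showSnackbar(event.message) }
        }
    }
    // ... Scaffold(snackbarHost = { SnackbarHost(snackbarHostState) }) { ... }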


@@ -52,6 +52,7 @@ import com.example.llama.revamp.data.model.SystemPrompt
 import com.example.llama.revamp.engine.ModelLoadingMetrics
 import com.example.llama.revamp.ui.components.ModelCardCoreExpandable
 import com.example.llama.revamp.ui.components.ModelUnloadDialogHandler
+import com.example.llama.revamp.ui.components.ScaffoldEvent
 import com.example.llama.revamp.viewmodel.ModelLoadingViewModel
@@ -67,6 +68,7 @@ enum class SystemPromptTab(val label: String) {
 @OptIn(ExperimentalMaterial3Api::class, ExperimentalFoundationApi::class)
 @Composable
 fun ModelLoadingScreen(
+    onScaffoldEvent: (ScaffoldEvent) -> Unit,
     onNavigateBack: () -> Unit,
     onNavigateToBenchmark: (ModelLoadingMetrics) -> Unit,
     onNavigateToConversation: (ModelLoadingMetrics) -> Unit,
@@ -83,6 +85,7 @@ fun ModelLoadingScreen(
     var isModelCardExpanded by remember { mutableStateOf(false) }
     var selectedMode by remember { mutableStateOf<Mode?>(null) }
     var useSystemPrompt by remember { mutableStateOf(false) }
+    var showedSystemPromptWarning by remember { mutableStateOf(false) }
     var selectedPrompt by remember { mutableStateOf<SystemPrompt?>(null) }
     var selectedTab by remember { mutableStateOf(SystemPromptTab.PRESETS) }
     var customPromptText by remember { mutableStateOf("") }
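
Note that showedSystemPromptWarning lives in plain remember, so the "already shown" flag is lost on configuration change (e.g. rotation) and the warning can reappear. If the flag should survive recreation, a rememberSaveable variant is a small change (sketch, assuming androidx.compose.runtime.saveable is available):

    // Sketch: keep the one-time warning flag across configuration changes.
    var showedSystemPromptWarning by rememberSaveable { mutableStateOf(false) }
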
@@ -216,6 +219,15 @@ fun ModelLoadingScreen(
                 Switch(
                     checked = useSystemPrompt,
                     onCheckedChange = {
+                        // First show a warning message if it has not been shown yet
+                        if (!showedSystemPromptWarning) {
+                            onScaffoldEvent(ScaffoldEvent.ShowSnackbar(
+                                message = "Model may not support system prompt!\nProceed with caution.",
+                            ))
+                            showedSystemPromptWarning = true
+                        }
+                        // Then update states
+                        useSystemPrompt = it
                         if (it && selectedMode != Mode.CONVERSATION) {
                             selectedMode = Mode.CONVERSATION
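
As written, onCheckedChange shows the warning on the first toggle regardless of direction; since useSystemPrompt starts false that is normally an enable, but gating on the new value would make the intent explicit. A sketch of that variant, based only on the code above:

    // Sketch: warn only the first time the switch is turned on.
    if (it && !showedSystemPromptWarning) {
        onScaffoldEvent(ScaffoldEvent.ShowSnackbar(
            message = "Model may not support system prompt!\nProceed with caution.",
        ))
        showedSystemPromptWarning = true
    }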


@@ -116,6 +116,8 @@ class LLamaAndroid private constructor() : InferenceEngine {
     /**
      * Process the plain text system prompt
+     *
+     * TODO-han.yin: return an error code if the system prompt is not correctly processed?
      */
     override suspend fun setSystemPrompt(prompt: String) =
         withContext(llamaDispatcher) {
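
The TODO above points at the gap this commit leaves open: setSystemPrompt has no way to report that the prompt was not applied. One hedged sketch of the direction the TODO suggests, assuming the engine can detect the failure; applySystemPromptInternal is a hypothetical stand-in for the real call, and the InferenceEngine interface would need the matching signature change:

    // Sketch only: not part of this commit; applySystemPromptInternal is hypothetical.
    override suspend fun setSystemPrompt(prompt: String): Result<Unit> =
        withContext(llamaDispatcher) {
            runCatching {
                val processed = applySystemPromptInternal(prompt)
                check(processed) { "System prompt was not processed by the model" }
            }
        }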