diff --git a/examples/llama.android/app/src/main/java/com/example/llama/revamp/MainActivity.kt b/examples/llama.android/app/src/main/java/com/example/llama/revamp/MainActivity.kt index 17f2923ba6..e1532039dd 100644 --- a/examples/llama.android/app/src/main/java/com/example/llama/revamp/MainActivity.kt +++ b/examples/llama.android/app/src/main/java/com/example/llama/revamp/MainActivity.kt @@ -306,6 +306,7 @@ fun AppContent( // Mode Selection Screen composable(AppDestinations.MODEL_LOADING_ROUTE) { ModelLoadingScreen( + onScaffoldEvent = handleScaffoldEvent, onNavigateBack = { navigationActions.navigateUp() }, onNavigateToBenchmark = { navigationActions.navigateToBenchmark(it) }, onNavigateToConversation = { navigationActions.navigateToConversation(it) }, diff --git a/examples/llama.android/app/src/main/java/com/example/llama/revamp/ui/screens/ModelLoadingScreen.kt b/examples/llama.android/app/src/main/java/com/example/llama/revamp/ui/screens/ModelLoadingScreen.kt index af554bdb78..e2f2a82ed4 100644 --- a/examples/llama.android/app/src/main/java/com/example/llama/revamp/ui/screens/ModelLoadingScreen.kt +++ b/examples/llama.android/app/src/main/java/com/example/llama/revamp/ui/screens/ModelLoadingScreen.kt @@ -52,6 +52,7 @@ import com.example.llama.revamp.data.model.SystemPrompt import com.example.llama.revamp.engine.ModelLoadingMetrics import com.example.llama.revamp.ui.components.ModelCardCoreExpandable import com.example.llama.revamp.ui.components.ModelUnloadDialogHandler +import com.example.llama.revamp.ui.components.ScaffoldEvent import com.example.llama.revamp.viewmodel.ModelLoadingViewModel @@ -67,6 +68,7 @@ enum class SystemPromptTab(val label: String) { @OptIn(ExperimentalMaterial3Api::class, ExperimentalFoundationApi::class) @Composable fun ModelLoadingScreen( + onScaffoldEvent: (ScaffoldEvent) -> Unit, onNavigateBack: () -> Unit, onNavigateToBenchmark: (ModelLoadingMetrics) -> Unit, onNavigateToConversation: (ModelLoadingMetrics) -> Unit, @@ -83,6 +85,7 @@ 
fun ModelLoadingScreen( var isModelCardExpanded by remember { mutableStateOf(false) } var selectedMode by remember { mutableStateOf(null) } var useSystemPrompt by remember { mutableStateOf(false) } + var showedSystemPromptWarning by remember { mutableStateOf(false) } var selectedPrompt by remember { mutableStateOf(null) } var selectedTab by remember { mutableStateOf(SystemPromptTab.PRESETS) } var customPromptText by remember { mutableStateOf("") } @@ -216,6 +219,15 @@ fun ModelLoadingScreen( Switch( checked = useSystemPrompt, onCheckedChange = { + // First show a warning message if not shown yet + if (!showedSystemPromptWarning) { + onScaffoldEvent(ScaffoldEvent.ShowSnackbar( + message = "Model may not support system prompt!\nProceed with caution.", + )) + showedSystemPromptWarning = true + } + + // Then update states useSystemPrompt = it if (it && selectedMode != Mode.CONVERSATION) { selectedMode = Mode.CONVERSATION diff --git a/examples/llama.android/llama/src/main/java/android/llama/cpp/LLamaAndroid.kt b/examples/llama.android/llama/src/main/java/android/llama/cpp/LLamaAndroid.kt index 6088ba5921..b0cebcec2f 100644 --- a/examples/llama.android/llama/src/main/java/android/llama/cpp/LLamaAndroid.kt +++ b/examples/llama.android/llama/src/main/java/android/llama/cpp/LLamaAndroid.kt @@ -116,6 +116,8 @@ class LLamaAndroid private constructor() : InferenceEngine { /** * Process the plain text system prompt + * + * TODO-han.yin: return error code if system prompt not correctly processed? */ override suspend fun setSystemPrompt(prompt: String) = withContext(llamaDispatcher) {