From 286ed05f132c659251dd4ee3f03054c4e78c0075 Mon Sep 17 00:00:00 2001 From: Han Yin Date: Tue, 15 Apr 2025 14:01:38 -0700 Subject: [PATCH] vm: merge SystemPromptViewModel into ModelLoadingViewModel --- .../revamp/ui/screens/ModelLoadingScreen.kt | 11 ++- .../revamp/viewmodel/ModelLoadingViewModel.kt | 77 +++++++++++++++++- .../revamp/viewmodel/SystemPromptViewModel.kt | 79 ------------------- 3 files changed, 80 insertions(+), 87 deletions(-) delete mode 100644 examples/llama.android/app/src/main/java/com/example/llama/revamp/viewmodel/SystemPromptViewModel.kt diff --git a/examples/llama.android/app/src/main/java/com/example/llama/revamp/ui/screens/ModelLoadingScreen.kt b/examples/llama.android/app/src/main/java/com/example/llama/revamp/ui/screens/ModelLoadingScreen.kt index f77194e143..51409a2aeb 100644 --- a/examples/llama.android/app/src/main/java/com/example/llama/revamp/ui/screens/ModelLoadingScreen.kt +++ b/examples/llama.android/app/src/main/java/com/example/llama/revamp/ui/screens/ModelLoadingScreen.kt @@ -52,7 +52,6 @@ import com.example.llama.revamp.data.model.SystemPrompt import com.example.llama.revamp.engine.InferenceEngine import com.example.llama.revamp.ui.components.PerformanceAppScaffold import com.example.llama.revamp.viewmodel.ModelLoadingViewModel -import com.example.llama.revamp.viewmodel.SystemPromptViewModel import kotlinx.coroutines.Job import kotlinx.coroutines.launch @@ -68,12 +67,12 @@ fun ModelLoadingScreen( onConversationSelected: (systemPrompt: String?, prepareJob: Job) -> Unit, onBackPressed: () -> Unit, modelLoadingViewModel: ModelLoadingViewModel = hiltViewModel(), - systemPromptViewModel: SystemPromptViewModel = hiltViewModel(), ) { val coroutineScope = rememberCoroutineScope() - val presetPrompts by systemPromptViewModel.presetPrompts.collectAsState() - val recentPrompts by systemPromptViewModel.recentPrompts.collectAsState() + val selectedModel by modelLoadingViewModel.selectedModel.collectAsState() // TODO-han.yin: USE IT + val 
presetPrompts by modelLoadingViewModel.presetPrompts.collectAsState() + val recentPrompts by modelLoadingViewModel.recentPrompts.collectAsState() var selectedMode by remember { mutableStateOf(null) } var useSystemPrompt by remember { mutableStateOf(false) } @@ -385,7 +384,7 @@ fun ModelLoadingScreen( SystemPromptTab.PRESETS, SystemPromptTab.RECENTS -> selectedPrompt?.let { prompt -> // Save the prompt to recent prompts database - systemPromptViewModel.savePromptToRecents(prompt) + modelLoadingViewModel.savePromptToRecents(prompt) prompt.content } @@ -393,7 +392,7 @@ fun ModelLoadingScreen( customPromptText.takeIf { it.isNotBlank() } ?.also { promptText -> // Save custom prompt to database - systemPromptViewModel.saveCustomPromptToRecents(promptText) + modelLoadingViewModel.saveCustomPromptToRecents(promptText) } } } else null diff --git a/examples/llama.android/app/src/main/java/com/example/llama/revamp/viewmodel/ModelLoadingViewModel.kt b/examples/llama.android/app/src/main/java/com/example/llama/revamp/viewmodel/ModelLoadingViewModel.kt index 2b84399fde..8d4fe42ba7 100644 --- a/examples/llama.android/app/src/main/java/com/example/llama/revamp/viewmodel/ModelLoadingViewModel.kt +++ b/examples/llama.android/app/src/main/java/com/example/llama/revamp/viewmodel/ModelLoadingViewModel.kt @@ -1,18 +1,84 @@ package com.example.llama.revamp.viewmodel import androidx.lifecycle.ViewModel +import androidx.lifecycle.viewModelScope +import com.example.llama.revamp.data.model.SystemPrompt +import com.example.llama.revamp.data.repository.SystemPromptRepository import com.example.llama.revamp.engine.InferenceManager import dagger.hilt.android.lifecycle.HiltViewModel +import kotlinx.coroutines.flow.SharingStarted +import kotlinx.coroutines.flow.StateFlow +import kotlinx.coroutines.flow.stateIn +import kotlinx.coroutines.launch import javax.inject.Inject @HiltViewModel class ModelLoadingViewModel @Inject constructor( - private val inferenceManager: InferenceManager + private val 
inferenceManager: InferenceManager, + private val repository: SystemPromptRepository ) : ViewModel() { - val engineState = inferenceManager.engineState + /** + * Currently selected model to be loaded + */ val selectedModel = inferenceManager.currentModel + /** + * Preset prompts + */ + val presetPrompts: StateFlow<List<SystemPrompt>> = repository.getPresetPrompts() + .stateIn( + scope = viewModelScope, + started = SharingStarted.WhileSubscribed(SUBSCRIPTION_TIMEOUT_MS), + initialValue = emptyList() + ) + + /** + * Recent prompts + */ + val recentPrompts: StateFlow<List<SystemPrompt>> = repository.getRecentPrompts() + .stateIn( + scope = viewModelScope, + started = SharingStarted.WhileSubscribed(SUBSCRIPTION_TIMEOUT_MS), + initialValue = emptyList() + ) + + /** + * Save a prompt to the recents list. + */ + fun savePromptToRecents(prompt: SystemPrompt) { + viewModelScope.launch { + repository.savePromptToRecents(prompt) + } + } + + /** + * Create and save a custom prompt. + */ + fun saveCustomPromptToRecents(content: String) { + viewModelScope.launch { + repository.saveCustomPrompt(content) + } + } + + /** + * Delete a prompt by ID. + */ + fun deletePrompt(id: String) { + viewModelScope.launch { + repository.deletePrompt(id) + } + } + + /** + * Clear all recent prompts. + */ + fun clearRecentPrompts() { + viewModelScope.launch { + repository.deleteAllPrompts() + } + } + /** * Prepares the engine for benchmark mode. */ @@ -24,4 +90,11 @@ class ModelLoadingViewModel @Inject constructor( */ suspend fun prepareForConversation(systemPrompt: String?
= null) = inferenceManager.loadModelForConversation(systemPrompt) + + + companion object { + private val TAG = ModelLoadingViewModel::class.java.simpleName + + private const val SUBSCRIPTION_TIMEOUT_MS = 5000L + } } diff --git a/examples/llama.android/app/src/main/java/com/example/llama/revamp/viewmodel/SystemPromptViewModel.kt b/examples/llama.android/app/src/main/java/com/example/llama/revamp/viewmodel/SystemPromptViewModel.kt deleted file mode 100644 index 60ceb43be0..0000000000 --- a/examples/llama.android/app/src/main/java/com/example/llama/revamp/viewmodel/SystemPromptViewModel.kt +++ /dev/null @@ -1,79 +0,0 @@ -package com.example.llama.revamp.viewmodel - -import androidx.lifecycle.ViewModel -import androidx.lifecycle.viewModelScope -import com.example.llama.revamp.data.model.SystemPrompt -import com.example.llama.revamp.data.repository.SystemPromptRepository -import dagger.hilt.android.lifecycle.HiltViewModel -import kotlinx.coroutines.flow.SharingStarted -import kotlinx.coroutines.flow.StateFlow -import kotlinx.coroutines.flow.stateIn -import kotlinx.coroutines.launch -import javax.inject.Inject - -/** - * ViewModel for handling system prompts. - */ -@HiltViewModel -class SystemPromptViewModel @Inject constructor( - private val repository: SystemPromptRepository -) : ViewModel() { - - // Preset prompts - val presetPrompts: StateFlow<List<SystemPrompt>> = repository.getPresetPrompts() - .stateIn( - scope = viewModelScope, - started = SharingStarted.WhileSubscribed(SUBSCRIPTION_TIMEOUT_MS), - initialValue = emptyList() - ) - - // Recent prompts - val recentPrompts: StateFlow<List<SystemPrompt>> = repository.getRecentPrompts() - .stateIn( - scope = viewModelScope, - started = SharingStarted.WhileSubscribed(SUBSCRIPTION_TIMEOUT_MS), - initialValue = emptyList() - ) - - /** - * Save a prompt to the recents list. - */ - fun savePromptToRecents(prompt: SystemPrompt) { - viewModelScope.launch { - repository.savePromptToRecents(prompt) - } - } - - /** - * Create and save a custom prompt. 
- */ - fun saveCustomPromptToRecents(content: String) { - viewModelScope.launch { - repository.saveCustomPrompt(content) - } - } - - /** - * Delete a prompt by ID. - */ - fun deletePrompt(id: String) { - viewModelScope.launch { - repository.deletePrompt(id) - } - } - - /** - * Clear all recent prompts. - */ - fun clearRecentPrompts() { - viewModelScope.launch { - repository.deleteAllPrompts() - } - } - - companion object { - private val TAG = SystemPromptViewModel::class.java.simpleName - - private const val SUBSCRIPTION_TIMEOUT_MS = 5000L - } -}