vm: merge SystemPromptViewModel into ModelLoadingViewModel
This commit is contained in:
parent 23d411d86e
commit 286ed05f13
@@ -52,7 +52,6 @@ import com.example.llama.revamp.data.model.SystemPrompt
import com.example.llama.revamp.engine.InferenceEngine
import com.example.llama.revamp.ui.components.PerformanceAppScaffold
import com.example.llama.revamp.viewmodel.ModelLoadingViewModel
-import com.example.llama.revamp.viewmodel.SystemPromptViewModel
import kotlinx.coroutines.Job
import kotlinx.coroutines.launch

@@ -68,12 +67,12 @@ fun ModelLoadingScreen(
    onConversationSelected: (systemPrompt: String?, prepareJob: Job) -> Unit,
    onBackPressed: () -> Unit,
    modelLoadingViewModel: ModelLoadingViewModel = hiltViewModel(),
-    systemPromptViewModel: SystemPromptViewModel = hiltViewModel(),
) {
    val coroutineScope = rememberCoroutineScope()

-    val presetPrompts by systemPromptViewModel.presetPrompts.collectAsState()
-    val recentPrompts by systemPromptViewModel.recentPrompts.collectAsState()
+    val selectedModel by modelLoadingViewModel.selectedModel.collectAsState() // TODO-han.yin: USE IT
+    val presetPrompts by modelLoadingViewModel.presetPrompts.collectAsState()
+    val recentPrompts by modelLoadingViewModel.recentPrompts.collectAsState()

    var selectedMode by remember { mutableStateOf<Mode?>(null) }
    var useSystemPrompt by remember { mutableStateOf(false) }
@@ -385,7 +384,7 @@ fun ModelLoadingScreen(
                SystemPromptTab.PRESETS, SystemPromptTab.RECENTS ->
                    selectedPrompt?.let { prompt ->
                        // Save the prompt to recent prompts database
-                        systemPromptViewModel.savePromptToRecents(prompt)
+                        modelLoadingViewModel.savePromptToRecents(prompt)
                        prompt.content
                    }

@@ -393,7 +392,7 @@ fun ModelLoadingScreen(
                customPromptText.takeIf { it.isNotBlank() }
                    ?.also { promptText ->
                        // Save custom prompt to database
-                        systemPromptViewModel.saveCustomPromptToRecents(promptText)
+                        modelLoadingViewModel.saveCustomPromptToRecents(promptText)
                    }
            } else null
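Not shown in these hunks is how the screen turns the resolved prompt into the prepareJob that onConversationSelected expects. A minimal sketch of that wiring inside ModelLoadingScreen, assuming a hypothetical confirm handler (the onStartConversation name and the place it would be invoked from are illustrative, not part of this commit):

// Hypothetical confirm handler inside ModelLoadingScreen; coroutineScope,
// modelLoadingViewModel, and onConversationSelected are the ones shown above.
val onStartConversation: (String?) -> Unit = { systemPrompt ->
    // Kick off model preparation without blocking the UI and hand the Job to the
    // caller so navigation can await or observe it.
    val prepareJob = coroutineScope.launch {
        modelLoadingViewModel.prepareForConversation(systemPrompt)
    }
    onConversationSelected(systemPrompt, prepareJob)
}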
@@ -1,18 +1,84 @@
package com.example.llama.revamp.viewmodel

import androidx.lifecycle.ViewModel
import androidx.lifecycle.viewModelScope
import com.example.llama.revamp.data.model.SystemPrompt
import com.example.llama.revamp.data.repository.SystemPromptRepository
import com.example.llama.revamp.engine.InferenceManager
import dagger.hilt.android.lifecycle.HiltViewModel
import kotlinx.coroutines.flow.SharingStarted
import kotlinx.coroutines.flow.StateFlow
import kotlinx.coroutines.flow.stateIn
import kotlinx.coroutines.launch
import javax.inject.Inject

@HiltViewModel
class ModelLoadingViewModel @Inject constructor(
-    private val inferenceManager: InferenceManager
+    private val inferenceManager: InferenceManager,
+    private val repository: SystemPromptRepository
) : ViewModel() {

    val engineState = inferenceManager.engineState
    /**
     * Currently selected model to be loaded
     */
    val selectedModel = inferenceManager.currentModel

    /**
     * Preset prompts
     */
    val presetPrompts: StateFlow<List<SystemPrompt>> = repository.getPresetPrompts()
        .stateIn(
            scope = viewModelScope,
            started = SharingStarted.WhileSubscribed(SUBSCRIPTION_TIMEOUT_MS),
            initialValue = emptyList()
        )

    /**
     * Recent prompts
     */
    val recentPrompts: StateFlow<List<SystemPrompt>> = repository.getRecentPrompts()
        .stateIn(
            scope = viewModelScope,
            started = SharingStarted.WhileSubscribed(SUBSCRIPTION_TIMEOUT_MS),
            initialValue = emptyList()
        )

    /**
     * Save a prompt to the recents list.
     */
    fun savePromptToRecents(prompt: SystemPrompt) {
        viewModelScope.launch {
            repository.savePromptToRecents(prompt)
        }
    }

    /**
     * Create and save a custom prompt.
     */
    fun saveCustomPromptToRecents(content: String) {
        viewModelScope.launch {
            repository.saveCustomPrompt(content)
        }
    }

    /**
     * Delete a prompt by ID.
     */
    fun deletePrompt(id: String) {
        viewModelScope.launch {
            repository.deletePrompt(id)
        }
    }

    /**
     * Clear all recent prompts.
     */
    fun clearRecentPrompts() {
        viewModelScope.launch {
            repository.deleteAllPrompts()
        }
    }

    /**
     * Prepares the engine for benchmark mode.
     */
@@ -24,4 +90,11 @@ class ModelLoadingViewModel @Inject constructor(
     */
    suspend fun prepareForConversation(systemPrompt: String? = null) =
        inferenceManager.loadModelForConversation(systemPrompt)


    companion object {
        private val TAG = ModelLoadingViewModel::class.java.simpleName

        private const val SUBSCRIPTION_TIMEOUT_MS = 5000L
    }
}
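For reference, the repository surface the merged ViewModel now depends on, inferred only from the calls above. The actual declaration lives in SystemPromptRepository and may differ; the interface name below, the suspend modifiers, and the Flow return types are assumptions, not taken from this diff:

import com.example.llama.revamp.data.model.SystemPrompt
import kotlinx.coroutines.flow.Flow

// Hypothetical shape of SystemPromptRepository, reconstructed from the calls in
// ModelLoadingViewModel above. Only the members actually invoked there are listed.
interface SystemPromptRepositorySketch {
    fun getPresetPrompts(): Flow<List<SystemPrompt>>   // backs presetPrompts.stateIn(...)
    fun getRecentPrompts(): Flow<List<SystemPrompt>>   // backs recentPrompts.stateIn(...)
    suspend fun savePromptToRecents(prompt: SystemPrompt)
    suspend fun saveCustomPrompt(content: String)
    suspend fun deletePrompt(id: String)
    suspend fun deleteAllPrompts()
}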
@@ -1,79 +0,0 @@
-package com.example.llama.revamp.viewmodel
-
-import androidx.lifecycle.ViewModel
-import androidx.lifecycle.viewModelScope
-import com.example.llama.revamp.data.model.SystemPrompt
-import com.example.llama.revamp.data.repository.SystemPromptRepository
-import dagger.hilt.android.lifecycle.HiltViewModel
-import kotlinx.coroutines.flow.SharingStarted
-import kotlinx.coroutines.flow.StateFlow
-import kotlinx.coroutines.flow.stateIn
-import kotlinx.coroutines.launch
-import javax.inject.Inject
-
-/**
- * ViewModel for handling system prompts.
- */
-@HiltViewModel
-class SystemPromptViewModel @Inject constructor(
-    private val repository: SystemPromptRepository
-) : ViewModel() {
-
-    // Preset prompts
-    val presetPrompts: StateFlow<List<SystemPrompt>> = repository.getPresetPrompts()
-        .stateIn(
-            scope = viewModelScope,
-            started = SharingStarted.WhileSubscribed(SUBSCRIPTION_TIMEOUT_MS),
-            initialValue = emptyList()
-        )
-
-    // Recent prompts
-    val recentPrompts: StateFlow<List<SystemPrompt>> = repository.getRecentPrompts()
-        .stateIn(
-            scope = viewModelScope,
-            started = SharingStarted.WhileSubscribed(SUBSCRIPTION_TIMEOUT_MS),
-            initialValue = emptyList()
-        )
-
-    /**
-     * Save a prompt to the recents list.
-     */
-    fun savePromptToRecents(prompt: SystemPrompt) {
-        viewModelScope.launch {
-            repository.savePromptToRecents(prompt)
-        }
-    }
-
-    /**
-     * Create and save a custom prompt.
-     */
-    fun saveCustomPromptToRecents(content: String) {
-        viewModelScope.launch {
-            repository.saveCustomPrompt(content)
-        }
-    }
-
-    /**
-     * Delete a prompt by ID.
-     */
-    fun deletePrompt(id: String) {
-        viewModelScope.launch {
-            repository.deletePrompt(id)
-        }
-    }
-
-    /**
-     * Clear all recent prompts.
-     */
-    fun clearRecentPrompts() {
-        viewModelScope.launch {
-            repository.deleteAllPrompts()
-        }
-    }
-
-    companion object {
-        private val TAG = SystemPromptViewModel::class.java.simpleName
-
-        private const val SUBSCRIPTION_TIMEOUT_MS = 5000L
-    }
-}