core: verify model file path is readable

Han Yin 2025-08-06 15:54:35 -07:00
parent ca1cda37fd
commit 173c4c61a4
2 changed files with 15 additions and 8 deletions

View File

@@ -186,7 +186,7 @@ internal class InferenceServiceImpl @Inject internal constructor(
                 )
             }
         } catch (e: Exception) {
-            Log.e(TAG, "Error loading model", e)
+            Log.e(TAG, e.message, e)
             null
         }
     }
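
Note: a minimal, runnable sketch of what this service-side change does. The helper name loadModelOrNull and the println stand-in for Log.e are illustrative only, not from the repository; the point is that the catch block now logs the exception's own message rather than a fixed "Error loading model" string, so engine-side validation failures keep their specific reason in the log.

// Illustrative sketch only; the real code lives in InferenceServiceImpl and uses Log.e.
fun <T> loadModelOrNull(tag: String, block: () -> T): T? =
    try {
        block()
    } catch (e: Exception) {
        println("E/$tag: ${e.message}")   // stands in for Log.e(TAG, e.message, e)
        null
    }

fun main() {
    val result = loadModelOrNull("InferenceServiceImpl") {
        throw IllegalArgumentException("Cannot read file")
    }
    println("result = $result")           // prints: result = null
}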

View File

@@ -17,6 +17,7 @@ import kotlinx.coroutines.flow.flowOn
 import kotlinx.coroutines.launch
 import kotlinx.coroutines.withContext
 import java.io.File
+import java.io.IOException
 
 /**
  * JNI wrapper for the llama.cpp library providing Android-friendly access to large language models.
@@ -128,26 +129,32 @@ internal class InferenceEngineImpl private constructor(
         check(_state.value is InferenceEngine.State.Initialized) {
             "Cannot load model in ${_state.value.javaClass.simpleName}!"
         }
 
-        File(pathToModel).let {
-            require(it.exists()) { "Model file not found: $pathToModel" }
-            require(it.isFile) { "Model file is not a file: $pathToModel" }
-        }
         try {
+            Log.i(TAG, "Checking access to model file... \n$pathToModel")
+            File(pathToModel).let {
+                require(it.exists()) { "File not found" }
+                require(it.isFile) { "Not a valid file" }
+                require(it.canRead()) { "Cannot read file" }
+            }
+
             Log.i(TAG, "Loading model... \n$pathToModel")
             _readyForSystemPrompt = false
             _state.value = InferenceEngine.State.LoadingModel
             load(pathToModel).let {
-                if (it != 0) throw IllegalStateException("Failed to load the model!")
+                // TODO-han.yin: find a better way to pass other error codes
+                if (it != 0) throw IOException("Unsupported architecture")
             }
             prepare().let {
-                if (it != 0) throw IllegalStateException("Failed to prepare resources!")
+                if (it != 0) throw IOException("Failed to prepare resources")
             }
             Log.i(TAG, "Model loaded!")
             _readyForSystemPrompt = true
             _state.value = InferenceEngine.State.ModelReady
         } catch (e: Exception) {
-            _state.value = InferenceEngine.State.Error(e.message ?: "Unknown error")
+            val msg = e.message ?: "Unknown error"
+            Log.e(TAG, msg + "\n" + pathToModel, e)
+            _state.value = InferenceEngine.State.Error(msg)
             throw e
         }
     }
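
Note: a standalone, runnable sketch of the access checks and error mapping added above. The helper checkAndLoad and the load lambda standing in for the native call are hypothetical names, not the engine API. require() throws IllegalArgumentException with the given message, and non-zero native return codes now surface as IOException; in the engine, the catch block logs that message together with the model path, sets State.Error, and rethrows.

import java.io.File
import java.io.IOException

// Illustrative helper, not the engine API: reproduces the access checks and error mapping.
fun checkAndLoad(pathToModel: String, load: (String) -> Int) {
    // require() throws IllegalArgumentException with the lambda's message on failure;
    // in the engine that message becomes State.Error and the exception is rethrown.
    File(pathToModel).let {
        require(it.exists()) { "File not found" }
        require(it.isFile) { "Not a valid file" }
        require(it.canRead()) { "Cannot read file" }
    }
    // Non-zero native return codes now surface as IOException instead of IllegalStateException.
    if (load(pathToModel) != 0) throw IOException("Unsupported architecture")
}

fun main() {
    // A path that does not exist fails the first check and prints "File not found".
    runCatching { checkAndLoad("/nonexistent/model.gguf") { 0 } }
        .onFailure { println(it.message) }
}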