llama: update the library's package name and namespace

This commit is contained in:
Han Yin 2025-09-19 13:35:01 -07:00
parent 96817ae667
commit 56e83b723b
41 changed files with 89 additions and 87 deletions

View File

@@ -70,7 +70,7 @@ dependencies {
implementation(libs.accompanist.systemuicontroller) implementation(libs.accompanist.systemuicontroller)
// Subproject // Subproject
implementation(project(":llama")) implementation(project(":lib"))
debugImplementation(libs.bundles.debug) debugImplementation(libs.bundles.debug)
testImplementation(libs.junit) testImplementation(libs.junit)

View File

@@ -3,8 +3,6 @@ package com.example.llama
import android.app.Activity import android.app.Activity
import android.content.ActivityNotFoundException import android.content.ActivityNotFoundException
import android.content.Intent import android.content.Intent
import android.llama.cpp.InferenceEngine.State
import android.llama.cpp.isUninterruptible
import android.os.Bundle import android.os.Bundle
import androidx.activity.ComponentActivity import androidx.activity.ComponentActivity
import androidx.activity.compose.rememberLauncherForActivityResult import androidx.activity.compose.rememberLauncherForActivityResult
@@ -32,6 +30,8 @@ import androidx.navigation.compose.composable
import androidx.navigation.compose.currentBackStackEntryAsState import androidx.navigation.compose.currentBackStackEntryAsState
import androidx.navigation.compose.rememberNavController import androidx.navigation.compose.rememberNavController
import androidx.navigation.navArgument import androidx.navigation.navArgument
import com.arm.aichat.InferenceEngine.State
import com.arm.aichat.isUninterruptible
import com.example.llama.engine.ModelLoadingMetrics import com.example.llama.engine.ModelLoadingMetrics
import com.example.llama.navigation.AppDestinations import com.example.llama.navigation.AppDestinations
import com.example.llama.navigation.NavigationActions import com.example.llama.navigation.NavigationActions

View File

@@ -1,11 +1,11 @@
package com.example.llama.data.model package com.example.llama.data.model
import kotlinx.serialization.Serializable import kotlinx.serialization.Serializable
import android.llama.cpp.gguf.GgufMetadata as Domain import com.arm.aichat.gguf.GgufMetadata as Domain
/** /**
* A local serializable domain replicate of [android.llama.cpp.gguf.GgufMetadata] * A local serializable domain replicate of [com.arm.aichat.gguf.GgufMetadata]
*/ */
@Serializable @Serializable
data class GgufMetadata( data class GgufMetadata(

View File

@@ -1,6 +1,6 @@
package com.example.llama.data.model package com.example.llama.data.model
import android.llama.cpp.gguf.FileType import com.arm.aichat.gguf.FileType
import com.example.llama.util.formatContextLength import com.example.llama.util.formatContextLength
import com.example.llama.util.formatFileByteSize import com.example.llama.util.formatFileByteSize

View File

@@ -1,11 +1,11 @@
package com.example.llama.data.repo package com.example.llama.data.repo
import android.content.Context import android.content.Context
import android.llama.cpp.gguf.GgufMetadataReader
import android.llama.cpp.gguf.InvalidFileFormatException
import android.net.Uri import android.net.Uri
import android.os.StatFs import android.os.StatFs
import android.util.Log import android.util.Log
import com.arm.aichat.gguf.GgufMetadataReader
import com.arm.aichat.gguf.InvalidFileFormatException
import com.example.llama.data.db.dao.ModelDao import com.example.llama.data.db.dao.ModelDao
import com.example.llama.data.db.entity.ModelEntity import com.example.llama.data.db.entity.ModelEntity
import com.example.llama.data.model.GgufMetadata import com.example.llama.data.model.GgufMetadata

View File

@@ -1,10 +1,10 @@
package com.example.llama.di package com.example.llama.di
import android.content.Context import android.content.Context
import android.llama.cpp.InferenceEngine import com.arm.aichat.AiChat
import android.llama.cpp.AiChat import com.arm.aichat.InferenceEngine
import android.llama.cpp.TierDetection import com.arm.aichat.TierDetection
import android.llama.cpp.gguf.GgufMetadataReader import com.arm.aichat.gguf.GgufMetadataReader
import com.example.llama.data.db.AppDatabase import com.example.llama.data.db.AppDatabase
import com.example.llama.data.repo.ModelRepository import com.example.llama.data.repo.ModelRepository
import com.example.llama.data.repo.ModelRepositoryImpl import com.example.llama.data.repo.ModelRepositoryImpl

View File

@@ -1,8 +1,8 @@
package com.example.llama.engine package com.example.llama.engine
import android.llama.cpp.InferenceEngine
import android.llama.cpp.InferenceEngine.State
import android.util.Log import android.util.Log
import com.arm.aichat.InferenceEngine
import com.arm.aichat.InferenceEngine.State
import com.example.llama.data.model.ModelInfo import com.example.llama.data.model.ModelInfo
import kotlinx.coroutines.flow.Flow import kotlinx.coroutines.flow.Flow
import kotlinx.coroutines.flow.MutableStateFlow import kotlinx.coroutines.flow.MutableStateFlow

View File

@@ -1,8 +1,8 @@
package com.example.llama.engine package com.example.llama.engine
import android.llama.cpp.InferenceEngine
import android.llama.cpp.InferenceEngine.State
import android.util.Log import android.util.Log
import com.arm.aichat.InferenceEngine
import com.arm.aichat.InferenceEngine.State
import com.example.llama.APP_NAME import com.example.llama.APP_NAME
import kotlinx.coroutines.CancellationException import kotlinx.coroutines.CancellationException
import kotlinx.coroutines.CoroutineScope import kotlinx.coroutines.CoroutineScope

View File

@@ -1,7 +1,7 @@
package com.example.llama.engine package com.example.llama.engine
import android.llama.cpp.LLamaTier import com.arm.aichat.LLamaTier
import android.llama.cpp.TierDetection import com.arm.aichat.TierDetection
import android.util.Log import android.util.Log
/** /**

View File

@@ -1,7 +1,5 @@
package com.example.llama.ui.components package com.example.llama.ui.components
import android.llama.cpp.ArmFeature
import android.llama.cpp.ArmFeaturesMapper.DisplayItem
import androidx.compose.foundation.layout.fillMaxWidth import androidx.compose.foundation.layout.fillMaxWidth
import androidx.compose.foundation.text.TextAutoSize import androidx.compose.foundation.text.TextAutoSize
import androidx.compose.material3.MaterialTheme import androidx.compose.material3.MaterialTheme
@@ -12,6 +10,8 @@ import androidx.compose.material3.Text
import androidx.compose.runtime.Composable import androidx.compose.runtime.Composable
import androidx.compose.ui.Modifier import androidx.compose.ui.Modifier
import androidx.compose.ui.text.font.FontWeight import androidx.compose.ui.text.font.FontWeight
import com.arm.aichat.ArmFeature
import com.arm.aichat.ArmFeaturesMapper.DisplayItem
import kotlin.math.sqrt import kotlin.math.sqrt
/** /**

View File

@@ -1,7 +1,6 @@
package com.example.llama.ui.screens package com.example.llama.ui.screens
import android.content.Intent import android.content.Intent
import android.llama.cpp.InferenceEngine.State
import android.widget.Toast import android.widget.Toast
import androidx.activity.compose.BackHandler import androidx.activity.compose.BackHandler
import androidx.compose.foundation.background import androidx.compose.foundation.background
@@ -50,6 +49,7 @@ import androidx.compose.ui.text.font.FontWeight
import androidx.compose.ui.text.style.TextAlign import androidx.compose.ui.text.style.TextAlign
import androidx.compose.ui.unit.dp import androidx.compose.ui.unit.dp
import androidx.core.net.toUri import androidx.core.net.toUri
import com.arm.aichat.InferenceEngine.State
import com.example.llama.data.model.ModelInfo import com.example.llama.data.model.ModelInfo
import com.example.llama.engine.ModelLoadingMetrics import com.example.llama.engine.ModelLoadingMetrics
import com.example.llama.ui.components.ModelCardContentArchitectureRow import com.example.llama.ui.components.ModelCardContentArchitectureRow

View File

@@ -1,7 +1,6 @@
package com.example.llama.ui.screens package com.example.llama.ui.screens
import android.content.Intent import android.content.Intent
import android.llama.cpp.InferenceEngine.State
import android.widget.Toast import android.widget.Toast
import androidx.activity.compose.BackHandler import androidx.activity.compose.BackHandler
import androidx.compose.animation.core.LinearEasing import androidx.compose.animation.core.LinearEasing
@@ -63,6 +62,7 @@ import androidx.core.net.toUri
import androidx.lifecycle.Lifecycle import androidx.lifecycle.Lifecycle
import androidx.lifecycle.LifecycleEventObserver import androidx.lifecycle.LifecycleEventObserver
import androidx.lifecycle.compose.LocalLifecycleOwner import androidx.lifecycle.compose.LocalLifecycleOwner
import com.arm.aichat.InferenceEngine.State
import com.example.llama.data.model.ModelInfo import com.example.llama.data.model.ModelInfo
import com.example.llama.engine.ModelLoadingMetrics import com.example.llama.engine.ModelLoadingMetrics
import com.example.llama.engine.TokenMetrics import com.example.llama.engine.TokenMetrics

View File

@@ -1,6 +1,5 @@
package com.example.llama.ui.screens package com.example.llama.ui.screens
import android.llama.cpp.gguf.FileType
import androidx.compose.foundation.layout.Arrangement import androidx.compose.foundation.layout.Arrangement
import androidx.compose.foundation.layout.Column import androidx.compose.foundation.layout.Column
import androidx.compose.foundation.layout.ExperimentalLayoutApi import androidx.compose.foundation.layout.ExperimentalLayoutApi
@@ -22,6 +21,7 @@ import androidx.compose.ui.Modifier
import androidx.compose.ui.text.font.FontWeight import androidx.compose.ui.text.font.FontWeight
import androidx.compose.ui.text.style.TextOverflow import androidx.compose.ui.text.style.TextOverflow
import androidx.compose.ui.unit.dp import androidx.compose.ui.unit.dp
import com.arm.aichat.gguf.FileType
import com.example.llama.data.model.ModelInfo import com.example.llama.data.model.ModelInfo
import com.example.llama.ui.components.ModelCardContentArchitectureRow import com.example.llama.ui.components.ModelCardContentArchitectureRow
import com.example.llama.ui.components.ModelCardContentContextRow import com.example.llama.ui.components.ModelCardContentContextRow

View File

@@ -1,8 +1,6 @@
package com.example.llama.ui.screens package com.example.llama.ui.screens
import android.content.Intent import android.content.Intent
import android.llama.cpp.InferenceEngine.State
import android.llama.cpp.UnsupportedArchitectureException
import android.widget.Toast import android.widget.Toast
import androidx.activity.compose.BackHandler import androidx.activity.compose.BackHandler
import androidx.compose.animation.AnimatedVisibility import androidx.compose.animation.AnimatedVisibility
@@ -60,6 +58,8 @@ import androidx.compose.ui.semantics.Role
import androidx.compose.ui.text.style.TextOverflow import androidx.compose.ui.text.style.TextOverflow
import androidx.compose.ui.unit.dp import androidx.compose.ui.unit.dp
import androidx.core.net.toUri import androidx.core.net.toUri
import com.arm.aichat.InferenceEngine.State
import com.arm.aichat.UnsupportedArchitectureException
import com.example.llama.data.model.SystemPrompt import com.example.llama.data.model.SystemPrompt
import com.example.llama.engine.ModelLoadingMetrics import com.example.llama.engine.ModelLoadingMetrics
import com.example.llama.ui.components.ModelCardCoreExpandable import com.example.llama.ui.components.ModelCardCoreExpandable

View File

@@ -1,8 +1,6 @@
package com.example.llama.ui.screens package com.example.llama.ui.screens
import android.content.Intent import android.content.Intent
import android.llama.cpp.ArmFeaturesMapper
import android.llama.cpp.ArmFeaturesMapper.DisplayItem
import androidx.compose.foundation.layout.Column import androidx.compose.foundation.layout.Column
import androidx.compose.foundation.layout.Row import androidx.compose.foundation.layout.Row
import androidx.compose.foundation.layout.Spacer import androidx.compose.foundation.layout.Spacer
@@ -30,6 +28,8 @@ import androidx.compose.ui.Modifier
import androidx.compose.ui.platform.LocalContext import androidx.compose.ui.platform.LocalContext
import androidx.compose.ui.unit.dp import androidx.compose.ui.unit.dp
import androidx.core.net.toUri import androidx.core.net.toUri
import com.arm.aichat.ArmFeaturesMapper
import com.arm.aichat.ArmFeaturesMapper.DisplayItem
import com.example.llama.APP_NAME import com.example.llama.APP_NAME
import com.example.llama.BuildConfig import com.example.llama.BuildConfig
import com.example.llama.data.source.prefs.ColorThemeMode import com.example.llama.data.source.prefs.ColorThemeMode

View File

@@ -1,7 +1,7 @@
package com.example.llama.viewmodel package com.example.llama.viewmodel
import android.llama.cpp.isUninterruptible
import androidx.lifecycle.viewModelScope import androidx.lifecycle.viewModelScope
import com.arm.aichat.isUninterruptible
import com.example.llama.data.model.ModelInfo import com.example.llama.data.model.ModelInfo
import com.example.llama.engine.BenchmarkService import com.example.llama.engine.BenchmarkService
import com.example.llama.ui.scaffold.ScaffoldEvent import com.example.llama.ui.scaffold.ScaffoldEvent

View File

@@ -1,11 +1,11 @@
package com.example.llama.viewmodel package com.example.llama.viewmodel
import android.llama.cpp.InferenceEngine
import android.llama.cpp.InferenceEngine.State
import android.llama.cpp.isModelLoaded
import android.llama.cpp.isUninterruptible
import androidx.lifecycle.ViewModel import androidx.lifecycle.ViewModel
import androidx.lifecycle.viewModelScope import androidx.lifecycle.viewModelScope
import com.arm.aichat.InferenceEngine
import com.arm.aichat.InferenceEngine.State
import com.arm.aichat.isModelLoaded
import com.arm.aichat.isUninterruptible
import com.example.llama.engine.InferenceService import com.example.llama.engine.InferenceService
import kotlinx.coroutines.flow.MutableStateFlow import kotlinx.coroutines.flow.MutableStateFlow
import kotlinx.coroutines.flow.StateFlow import kotlinx.coroutines.flow.StateFlow

View File

@@ -6,11 +6,11 @@ import android.content.Context
import android.content.Context.RECEIVER_EXPORTED import android.content.Context.RECEIVER_EXPORTED
import android.content.Intent import android.content.Intent
import android.content.IntentFilter import android.content.IntentFilter
import android.llama.cpp.gguf.InvalidFileFormatException
import android.net.Uri import android.net.Uri
import android.util.Log import android.util.Log
import androidx.lifecycle.ViewModel import androidx.lifecycle.ViewModel
import androidx.lifecycle.viewModelScope import androidx.lifecycle.viewModelScope
import com.arm.aichat.gguf.InvalidFileFormatException
import com.example.llama.data.model.ModelInfo import com.example.llama.data.model.ModelInfo
import com.example.llama.data.repo.InsufficientStorageException import com.example.llama.data.repo.InsufficientStorageException
import com.example.llama.data.repo.ModelRepository import com.example.llama.data.repo.ModelRepository

View File

@@ -1,9 +1,9 @@
package com.example.llama.viewmodel package com.example.llama.viewmodel
import android.llama.cpp.LLamaTier
import android.llama.cpp.TierDetection
import androidx.lifecycle.ViewModel import androidx.lifecycle.ViewModel
import androidx.lifecycle.viewModelScope import androidx.lifecycle.viewModelScope
import com.arm.aichat.LLamaTier
import com.arm.aichat.TierDetection
import com.example.llama.data.repo.ModelRepository import com.example.llama.data.repo.ModelRepository
import com.example.llama.data.source.prefs.ColorThemeMode import com.example.llama.data.source.prefs.ColorThemeMode
import com.example.llama.data.source.prefs.DarkThemeMode import com.example.llama.data.source.prefs.DarkThemeMode

View File

@@ -1,19 +1,19 @@
[versions] [versions]
# Plugins # Plugins
agp = "8.12.2" agp = "8.13.0"
ksp = "2.2.10-2.0.2" ksp = "2.2.10-2.0.2"
kotlin = "2.2.10" kotlin = "2.2.20"
dagger-hilt = "2.57.1" dagger-hilt = "2.57.1"
# AndroidX # AndroidX
activity = "1.10.1" activity = "1.11.0"
core-ktx = "1.17.0" core-ktx = "1.17.0"
datastore-preferences = "1.1.7" datastore-preferences = "1.1.7"
lifecycle = "2.9.3" lifecycle = "2.9.4"
navigation = "2.9.3" navigation = "2.9.4"
room = "2.7.2" room = "2.8.0"
hilt = "1.2.0" hilt = "1.3.0"
retrofit2 = "3.0.0" retrofit2 = "3.0.0"
okhttp3 = "5.1.0" okhttp3 = "5.1.0"
@@ -22,11 +22,11 @@ coroutines = "1.10.2"
serialization = "1.9.0" serialization = "1.9.0"
# Compose # Compose
compose-bom = "2025.08.01" compose-bom = "2025.09.00"
compose-foundation = "1.9.0" compose-foundation = "1.9.1"
compose-material-icons = "1.7.8" compose-material-icons = "1.7.8"
compose-material3 = "1.4.0-beta03" compose-material3 = "1.4.0-rc01"
compose-ui = "1.9.0" compose-ui = "1.9.1"
# Accompanist # Accompanist
accompanist = "0.36.0" accompanist = "0.36.0"

View File

@@ -5,7 +5,7 @@ plugins {
} }
android { android {
namespace = "android.llama.cpp" namespace = "com.arm.aichat"
compileSdk = 36 compileSdk = 36
ndkVersion = "29.0.13113456" ndkVersion = "29.0.13113456"
@@ -84,8 +84,8 @@ publishing {
publications { publications {
register<MavenPublication>("release") { register<MavenPublication>("release") {
groupId = "com.arm" groupId = "com.arm"
artifactId = "kleidi-llama" artifactId = "ai-chat"
version = "1.0.0" version = "0.1.0"
afterEvaluate { afterEvaluate {
from(components["release"]) from(components["release"])

View File

@@ -13,7 +13,7 @@ static const Aarch64Features features = info.features;
#define LOGI(...) __android_log_print(ANDROID_LOG_INFO, LOG_TAG, __VA_ARGS__) #define LOGI(...) __android_log_print(ANDROID_LOG_INFO, LOG_TAG, __VA_ARGS__)
extern "C" JNIEXPORT jint JNICALL extern "C" JNIEXPORT jint JNICALL
Java_android_llama_cpp_internal_TierDetectionImpl_getOptimalTier( Java_com_arm_aichat_internal_TierDetectionImpl_getOptimalTier(
JNIEnv* /*env*/, JNIEnv* /*env*/,
jobject /*clazz*/) { jobject /*clazz*/) {
int tier = 0; // Default to T0 (baseline) int tier = 0; // Default to T0 (baseline)
@@ -54,7 +54,7 @@ Java_android_llama_cpp_internal_TierDetectionImpl_getOptimalTier(
// Optional: Keep a feature string function for debugging // Optional: Keep a feature string function for debugging
extern "C" JNIEXPORT jstring JNICALL extern "C" JNIEXPORT jstring JNICALL
Java_android_llama_cpp_internal_TierDetectionImpl_getCpuFeaturesString( Java_com_arm_aichat_internal_TierDetectionImpl_getCpuFeaturesString(
JNIEnv* env, JNIEnv* env,
jobject /*clazz*/) { jobject /*clazz*/) {
std::string text; std::string text;

View File

@@ -72,7 +72,7 @@ static void log_callback(ggml_log_level level, const char *fmt, void *data) {
extern "C" extern "C"
JNIEXPORT void JNICALL JNIEXPORT void JNICALL
Java_android_llama_cpp_internal_InferenceEngineImpl_init(JNIEnv *env, jobject /*unused*/, jstring nativeLibDir) { Java_com_arm_aichat_internal_InferenceEngineImpl_init(JNIEnv *env, jobject /*unused*/, jstring nativeLibDir) {
// Set llama log handler to Android // Set llama log handler to Android
llama_log_set(log_callback, nullptr); llama_log_set(log_callback, nullptr);
@@ -89,7 +89,7 @@ Java_android_llama_cpp_internal_InferenceEngineImpl_init(JNIEnv *env, jobject /*
extern "C" extern "C"
JNIEXPORT jint JNICALL JNIEXPORT jint JNICALL
Java_android_llama_cpp_internal_InferenceEngineImpl_load(JNIEnv *env, jobject, jstring jmodel_path) { Java_com_arm_aichat_internal_InferenceEngineImpl_load(JNIEnv *env, jobject, jstring jmodel_path) {
llama_model_params model_params = llama_model_default_params(); llama_model_params model_params = llama_model_default_params();
const auto *model_path = env->GetStringUTFChars(jmodel_path, 0); const auto *model_path = env->GetStringUTFChars(jmodel_path, 0);
@@ -143,7 +143,7 @@ static common_sampler *new_sampler(float temp) {
extern "C" extern "C"
JNIEXPORT jint JNICALL JNIEXPORT jint JNICALL
Java_android_llama_cpp_internal_InferenceEngineImpl_prepare(JNIEnv * /*env*/, jobject /*unused*/) { Java_com_arm_aichat_internal_InferenceEngineImpl_prepare(JNIEnv * /*env*/, jobject /*unused*/) {
auto *context = init_context(g_model); auto *context = init_context(g_model);
if (!context) { return 1; } if (!context) { return 1; }
g_context = context; g_context = context;
@@ -167,13 +167,13 @@ static std::string get_backend() {
extern "C" extern "C"
JNIEXPORT jstring JNICALL JNIEXPORT jstring JNICALL
Java_android_llama_cpp_internal_InferenceEngineImpl_systemInfo(JNIEnv *env, jobject /*unused*/) { Java_com_arm_aichat_internal_InferenceEngineImpl_systemInfo(JNIEnv *env, jobject /*unused*/) {
return env->NewStringUTF(llama_print_system_info()); return env->NewStringUTF(llama_print_system_info());
} }
extern "C" extern "C"
JNIEXPORT jstring JNICALL JNIEXPORT jstring JNICALL
Java_android_llama_cpp_internal_InferenceEngineImpl_benchModel(JNIEnv *env, jobject /*unused*/, jint pp, jint tg, Java_com_arm_aichat_internal_InferenceEngineImpl_benchModel(JNIEnv *env, jobject /*unused*/, jint pp, jint tg,
jint pl, jint nr) { jint pl, jint nr) {
auto *context = init_context(g_model, pp); auto *context = init_context(g_model, pp);
if (!context) { if (!context) {
@@ -383,7 +383,7 @@ static int decode_tokens_in_batches(
extern "C" extern "C"
JNIEXPORT jint JNICALL JNIEXPORT jint JNICALL
Java_android_llama_cpp_internal_InferenceEngineImpl_processSystemPrompt( Java_com_arm_aichat_internal_InferenceEngineImpl_processSystemPrompt(
JNIEnv *env, JNIEnv *env,
jobject /*unused*/, jobject /*unused*/,
jstring jsystem_prompt jstring jsystem_prompt
@@ -432,7 +432,7 @@ Java_android_llama_cpp_internal_InferenceEngineImpl_processSystemPrompt(
extern "C" extern "C"
JNIEXPORT jint JNICALL JNIEXPORT jint JNICALL
Java_android_llama_cpp_internal_InferenceEngineImpl_processUserPrompt( Java_com_arm_aichat_internal_InferenceEngineImpl_processUserPrompt(
JNIEnv *env, JNIEnv *env,
jobject /*unused*/, jobject /*unused*/,
jstring juser_prompt, jstring juser_prompt,
@@ -516,7 +516,7 @@ static bool is_valid_utf8(const char *string) {
extern "C" extern "C"
JNIEXPORT jstring JNICALL JNIEXPORT jstring JNICALL
Java_android_llama_cpp_internal_InferenceEngineImpl_generateNextToken( Java_com_arm_aichat_internal_InferenceEngineImpl_generateNextToken(
JNIEnv *env, JNIEnv *env,
jobject /*unused*/ jobject /*unused*/
) { ) {
@@ -576,7 +576,7 @@ Java_android_llama_cpp_internal_InferenceEngineImpl_generateNextToken(
extern "C" extern "C"
JNIEXPORT void JNICALL JNIEXPORT void JNICALL
Java_android_llama_cpp_internal_InferenceEngineImpl_unload(JNIEnv * /*unused*/, jobject /*unused*/) { Java_com_arm_aichat_internal_InferenceEngineImpl_unload(JNIEnv * /*unused*/, jobject /*unused*/) {
// Reset long-term & short-term states // Reset long-term & short-term states
reset_long_term_states(); reset_long_term_states();
reset_short_term_states(); reset_short_term_states();
@@ -591,6 +591,6 @@ Java_android_llama_cpp_internal_InferenceEngineImpl_unload(JNIEnv * /*unused*/,
extern "C" extern "C"
JNIEXPORT void JNICALL JNIEXPORT void JNICALL
Java_android_llama_cpp_internal_InferenceEngineImpl_shutdown(JNIEnv *env, jobject /*unused*/) { Java_com_arm_aichat_internal_InferenceEngineImpl_shutdown(JNIEnv *env, jobject /*unused*/) {
llama_backend_free(); llama_backend_free();
} }

View File

@@ -1,12 +1,11 @@
package android.llama.cpp package com.arm.aichat
import android.content.Context import android.content.Context
import android.llama.cpp.internal.InferenceEngineImpl import com.arm.aichat.internal.InferenceEngineImpl
import android.llama.cpp.internal.TierDetectionImpl import com.arm.aichat.internal.TierDetectionImpl
/** /**
* Main entry point for the Ai Chat library. * Main entry point for Arm's AI Chat library.
* This is the only class that should be used by library consumers.
*/ */
object AiChat { object AiChat {
/** /**

View File

@@ -1,4 +1,4 @@
package android.llama.cpp package com.arm.aichat
/** /**
* Represents an Arm® CPU feature with its metadata. * Represents an Arm® CPU feature with its metadata.

View File

@@ -1,6 +1,6 @@
package android.llama.cpp package com.arm.aichat
import android.llama.cpp.InferenceEngine.State import com.arm.aichat.InferenceEngine.State
import kotlinx.coroutines.flow.Flow import kotlinx.coroutines.flow.Flow
import kotlinx.coroutines.flow.StateFlow import kotlinx.coroutines.flow.StateFlow

View File

@@ -1,4 +1,4 @@
package android.llama.cpp package com.arm.aichat
/** /**
* Public interface for [LLamaTier] detection information. * Public interface for [LLamaTier] detection information.

View File

@@ -1,4 +1,6 @@
package android.llama.cpp.gguf package com.arm.aichat.gguf
import kotlin.collections.get
/** /**

View File

@@ -1,4 +1,4 @@
package android.llama.cpp.gguf package com.arm.aichat.gguf
import java.io.IOException import java.io.IOException

View File

@@ -1,8 +1,8 @@
package android.llama.cpp.gguf package com.arm.aichat.gguf
import android.content.Context import android.content.Context
import android.llama.cpp.internal.gguf.GgufMetadataReaderImpl
import android.net.Uri import android.net.Uri
import com.arm.aichat.internal.gguf.GgufMetadataReaderImpl
import java.io.IOException import java.io.IOException
import java.io.InputStream import java.io.InputStream

View File

@@ -1,9 +1,10 @@
package android.llama.cpp.internal package com.arm.aichat.internal
import android.content.Context import android.content.Context
import android.llama.cpp.InferenceEngine
import android.llama.cpp.UnsupportedArchitectureException
import android.util.Log import android.util.Log
import com.arm.aichat.InferenceEngine
import com.arm.aichat.UnsupportedArchitectureException
import com.arm.aichat.internal.InferenceEngineImpl.Companion.getInstance
import dalvik.annotation.optimization.FastNative import dalvik.annotation.optimization.FastNative
import kotlinx.coroutines.CancellationException import kotlinx.coroutines.CancellationException
import kotlinx.coroutines.CoroutineScope import kotlinx.coroutines.CoroutineScope
@@ -205,7 +206,7 @@ internal class InferenceEngineImpl private constructor(
} }
/** /**
* Send plain text user prompt to LLM, which starts generating tokens in a [kotlinx.coroutines.flow.Flow] * Send plain text user prompt to LLM, which starts generating tokens in a [Flow]
*/ */
override fun sendUserPrompt( override fun sendUserPrompt(
message: String, message: String,

View File

@@ -1,14 +1,14 @@
package android.llama.cpp.internal package com.arm.aichat.internal
import android.content.Context import android.content.Context
import android.llama.cpp.LLamaTier
import android.llama.cpp.TierDetection
import android.util.Log import android.util.Log
import androidx.datastore.core.DataStore import androidx.datastore.core.DataStore
import androidx.datastore.preferences.core.Preferences import androidx.datastore.preferences.core.Preferences
import androidx.datastore.preferences.core.edit import androidx.datastore.preferences.core.edit
import androidx.datastore.preferences.core.intPreferencesKey import androidx.datastore.preferences.core.intPreferencesKey
import androidx.datastore.preferences.preferencesDataStore import androidx.datastore.preferences.preferencesDataStore
import com.arm.aichat.LLamaTier
import com.arm.aichat.TierDetection
import kotlinx.coroutines.flow.first import kotlinx.coroutines.flow.first
import kotlinx.coroutines.runBlocking import kotlinx.coroutines.runBlocking

View File

@@ -1,10 +1,10 @@
package android.llama.cpp.internal.gguf package com.arm.aichat.internal.gguf
import android.content.Context import android.content.Context
import android.llama.cpp.gguf.GgufMetadata
import android.llama.cpp.gguf.GgufMetadataReader
import android.llama.cpp.gguf.InvalidFileFormatException
import android.net.Uri import android.net.Uri
import com.arm.aichat.gguf.GgufMetadata
import com.arm.aichat.gguf.GgufMetadataReader
import com.arm.aichat.gguf.InvalidFileFormatException
import java.io.IOException import java.io.IOException
import java.io.InputStream import java.io.InputStream

View File

@@ -13,6 +13,6 @@ dependencyResolutionManagement {
} }
} }
rootProject.name = "LlamaAndroid" rootProject.name = "AiChat"
include(":app") include(":app")
include(":llama") include(":lib")