From f6f5715a861bb7eb3f8f4d147688a011bbda6adb Mon Sep 17 00:00:00 2001
From: kushagharahi <3326002+kushagharahi@users.noreply.github.com>
Date: Fri, 6 Mar 2026 00:24:24 -0600
Subject: [PATCH] =?UTF-8?q?LLAMA=5FSERVER=5FNO=5FWEBUI=20=E2=86=92=20LLAMA?=
 =?UTF-8?q?=5FBUILD=5FWEBUI?=
MIME-Version: 1.0
Content-Type: text/plain; charset=UTF-8
Content-Transfer-Encoding: 8bit
---
 CMakeLists.txt               | 2 +-
 tools/server/CMakeLists.txt  | 6 +++---
 tools/server/server-http.cpp | 6 +++---
 3 files changed, 7 insertions(+), 7 deletions(-)

diff --git a/CMakeLists.txt b/CMakeLists.txt
index 425d7addc1..9c6bcbead8 100644
--- a/CMakeLists.txt
+++ b/CMakeLists.txt
@@ -108,7 +108,7 @@ option(LLAMA_BUILD_TESTS "llama: build tests" ${LLAMA_STANDALONE})
 option(LLAMA_BUILD_TOOLS "llama: build tools" ${LLAMA_STANDALONE})
 option(LLAMA_BUILD_EXAMPLES "llama: build examples" ${LLAMA_STANDALONE})
 option(LLAMA_BUILD_SERVER "llama: build server example" ${LLAMA_STANDALONE})
-option(LLAMA_SERVER_NO_WEBUI "llama: disable the embedded Web UI in server" OFF)
+option(LLAMA_BUILD_WEBUI "llama: build the embedded Web UI for server" ${LLAMA_STANDALONE})
 option(LLAMA_TOOLS_INSTALL "llama: install tools" ${LLAMA_TOOLS_INSTALL_DEFAULT})
 option(LLAMA_TESTS_INSTALL "llama: install tests" ON)
 
diff --git a/tools/server/CMakeLists.txt b/tools/server/CMakeLists.txt
index ceb17ddd1e..abb6637716 100644
--- a/tools/server/CMakeLists.txt
+++ b/tools/server/CMakeLists.txt
@@ -36,9 +36,9 @@ set(TARGET_SRCS
     server-models.h
 )
 
-option(LLAMA_SERVER_NO_WEBUI "Disable the embedded Web UI" OFF)
+option(LLAMA_BUILD_WEBUI "Build the embedded Web UI" ${LLAMA_STANDALONE})
 
-if (NOT LLAMA_SERVER_NO_WEBUI)
+if (LLAMA_BUILD_WEBUI)
     set(PUBLIC_ASSETS
         index.html.gz
         loading.html
@@ -55,8 +55,8 @@ if (NOT LLAMA_SERVER_NO_WEBUI)
         )
         set_source_files_properties(${output} PROPERTIES GENERATED TRUE)
     endforeach()
+    add_definitions(-DLLAMA_BUILD_WEBUI)
 else()
-    add_definitions(-DLLAMA_SERVER_NO_WEBUI)
 endif()
 
 add_executable(${TARGET} ${TARGET_SRCS})
diff --git a/tools/server/server-http.cpp b/tools/server/server-http.cpp
index a362d959a9..8cc96684f1 100644
--- a/tools/server/server-http.cpp
+++ b/tools/server/server-http.cpp
@@ -8,7 +8,7 @@
 #include
 #include
 
-#ifndef LLAMA_SERVER_NO_WEBUI
+#ifdef LLAMA_BUILD_WEBUI
 // auto generated files (see README.md for details)
 #include "index.html.gz.hpp"
 #include "loading.html.hpp"
@@ -183,7 +183,7 @@ bool server_http_context::init(const common_params & params) {
     auto middleware_server_state = [this](const httplib::Request & req, httplib::Response & res) {
         bool ready = is_ready.load();
         if (!ready) {
-#ifndef LLAMA_SERVER_NO_WEBUI
+#ifdef LLAMA_BUILD_WEBUI
             auto tmp = string_split(req.path, '.');
             if (req.path == "/" || tmp.back() == "html") {
                 res.status = 503;
@@ -254,7 +254,7 @@ bool server_http_context::init(const common_params & params) {
             return 1;
         }
     } else {
-#ifndef LLAMA_SERVER_NO_WEBUI
+#ifdef LLAMA_BUILD_WEBUI
         // using embedded static index.html
         srv->Get(params.api_prefix + "/", [](const httplib::Request & req, httplib::Response & res) {
             if (req.get_header_value("Accept-Encoding").find("gzip") == std::string::npos) {