Move to server-chat.cpp instead

This commit is contained in:
Piotr Wilkin 2026-03-19 21:50:57 +01:00
parent 8686b807f7
commit fcad53c54e
11 changed files with 43 additions and 98 deletions

View File

@@ -55,8 +55,6 @@ add_library(${TARGET} STATIC
chat-peg-parser.h
chat.cpp
chat.h
chat-conversion.cpp
chat-conversion.h
common.cpp
common.h
console.cpp

View File

@@ -385,6 +385,34 @@ static json render_message_to_json(const std::vector<common_chat_msg> & msgs, co
return messages;
}
// DEPRECATED: only used in tests
// Render messages to OAI-compatible JSON by faking template caps:
// string content is always accepted; typed content only when not concatenating.
json common_chat_msgs_to_json_oaicompat(const std::vector<common_chat_msg> & msgs, bool concat_typed_text) {
    jinja::caps caps;
    caps.supports_string_content = true;
    caps.supports_typed_content  = !concat_typed_text;
    return render_message_to_json(msgs, caps);
}
// Serialize tool definitions into an OAI-compatible "tools" array.
// Returns a null json when the tool list is empty.
json common_chat_tools_to_json_oaicompat(const std::vector<common_chat_tool> & tools) {
    if (tools.empty()) {
        return json();
    }
    json result = json::array();
    for (const auto & tool : tools) {
        // "parameters" is stored as a JSON string on the tool; parse it here.
        json fn = {
            { "name",        tool.name },
            { "description", tool.description },
            { "parameters",  json::parse(tool.parameters) },
        };
        result.push_back({
            { "type",     "function" },
            { "function", fn },
        });
    }
    return result;
}
std::vector<common_chat_tool> common_chat_tools_parse_oaicompat(const json & tools) {
std::vector<common_chat_tool> result;

View File

@@ -291,8 +291,10 @@ std::vector<common_chat_msg> common_chat_msgs_parse_oaicompat(const nlohmann::or
std::vector<common_chat_tool> common_chat_tools_parse_oaicompat(const nlohmann::ordered_json & tools);
// Chat conversion functions (defined in chat-conversion.h)
#include "chat-conversion.h"
// DEPRECATED: only used in tests
nlohmann::ordered_json common_chat_msgs_to_json_oaicompat(const std::vector<common_chat_msg> & msgs, bool concat_typed_text = false);
nlohmann::ordered_json common_chat_tools_to_json_oaicompat(const std::vector<common_chat_tool> & tools);
// get template caps, useful for reporting to server /props endpoint
std::map<std::string, bool> common_chat_templates_get_caps(const common_chat_templates * chat_templates);

View File

@@ -154,6 +154,8 @@ if (NOT WIN32 OR NOT BUILD_SHARED_LIBS)
llama_build_and_test(test-grammar-integration.cpp)
llama_build_and_test(test-llama-grammar.cpp)
llama_build_and_test(test-chat.cpp WORKING_DIRECTORY ${PROJECT_SOURCE_DIR})
target_include_directories(test-chat PRIVATE ${PROJECT_SOURCE_DIR}/tools/server)
target_link_libraries(test-chat PRIVATE server-context)
# TODO: disabled on loongarch64 because the ggml-ci node lacks Python 3.8
if (NOT ${CMAKE_SYSTEM_PROCESSOR} MATCHES "loongarch64")
llama_build_and_test(test-json-schema-to-grammar.cpp WORKING_DIRECTORY ${PROJECT_SOURCE_DIR})

View File

@@ -7,6 +7,7 @@
//
#include "../src/llama-grammar.h"
#include "../src/unicode.h"
#include "../tools/server/server-chat.h"
#include "chat-auto-parser.h"
#include "chat.h"
#include "common.h"

View File

@@ -5,6 +5,8 @@ include_directories(${CMAKE_CURRENT_SOURCE_DIR} ${CMAKE_CURRENT_BINARY_DIR})
set(TARGET server-context)
add_library(${TARGET} STATIC
server-chat.cpp
server-chat.h
server-task.cpp
server-task.h
server-queue.cpp

View File

@@ -1,62 +1,8 @@
#include "chat-conversion.h"
#include "common.h"
#include "log.h"
#include "jinja/caps.h"
#include "server-chat.h"
#include "server-common.h"
#include <sstream>
#define JSON_ASSERT GGML_ASSERT
// Helper function for JSON value extraction with default
// Returns body[key] converted to T; falls back to default_value when the key
// is absent, null, or of an incompatible type (logging a warning in that case).
template <typename T>
static T json_value(const json & body, const std::string & key, const T & default_value) {
    // Missing key or explicit null both fall back to the default.
    if (!body.contains(key) || body.at(key).is_null()) {
        return default_value;
    }
    try {
        return body.at(key);
    } catch (NLOHMANN_JSON_NAMESPACE::detail::type_error const & err) {
        LOG_WRN("Wrong type supplied for parameter '%s'. Expected '%s', using default value: %s\n", key.c_str(), json(default_value).type_name(), err.what());
        return default_value;
    }
}
// Helper function to render messages to JSON (shared with chat.cpp)
// Shapes each message's "content" to what the template caps accept:
// string-only templates get concatenated text, typed-only templates get
// string content wrapped into a single {"type":"text"} part.
static json render_message_to_json(const std::vector<common_chat_msg> & msgs, const jinja::caps & c) {
    if (!c.supports_string_content && !c.supports_typed_content) {
        LOG_WRN("%s: Neither string content nor typed content is supported by the template. This is unexpected and may lead to issues.\n", __func__);
    }
    const bool string_only = c.supports_string_content && !c.supports_typed_content;
    const bool typed_only  = c.supports_typed_content  && !c.supports_string_content;
    json messages = json::array();
    for (const auto & msg : msgs) {
        // Concatenate typed parts into one string only for string-only templates.
        json jmsg = msg.to_json_oaicompat(/* concat_typed_text= */ string_only);
        if (typed_only && jmsg.at("content").is_string()) {
            // Typed-only template: promote plain string content to a typed part.
            jmsg["content"] = json::array({
                json{
                    { "type", "text" },
                    { "text", jmsg.at("content").get<std::string>() },
                }
            });
        }
        messages.push_back(std::move(jmsg));
    }
    return messages;
}
json common_chat_convert_responses_to_chatcmpl(const json & response_body) {
if (!response_body.contains("input")) {
throw std::invalid_argument("'input' is required");
@@ -557,34 +503,6 @@ json common_chat_convert_anthropic_to_oai(const json & body) {
return oai_body;
}
// DEPRECATED: only used in tests
// Build synthetic template caps (string always supported, typed content
// unless concatenating) and delegate to the shared renderer.
json common_chat_msgs_to_json_oaicompat(const std::vector<common_chat_msg> & msgs, bool concat_typed_text) {
    jinja::caps template_caps;
    template_caps.supports_string_content = true;
    template_caps.supports_typed_content  = !concat_typed_text;
    return render_message_to_json(msgs, template_caps);
}
// Convert tool definitions to the OAI-compatible "tools" JSON array;
// yields a null json for an empty tool list.
json common_chat_tools_to_json_oaicompat(const std::vector<common_chat_tool> & tools) {
    if (tools.empty()) {
        return json();
    }
    json out = json::array();
    for (const auto & tool : tools) {
        out.push_back(json{
            { "type", "function" },
            { "function", json{
                // tool.parameters holds a JSON schema as a string — parse it.
                { "name",        tool.name },
                { "description", tool.description },
                { "parameters",  json::parse(tool.parameters) },
            } },
        });
    }
    return out;
}
json common_chat_msg_diff_to_json_oaicompat(const common_chat_msg_diff & diff) {
json delta = json::object();
if (!diff.reasoning_content_delta.empty()) {

View File

@@ -1,12 +1,10 @@
// Chat conversion functions for OpenAI API compatibility
// Chat conversion functions for server (Responses API, Anthropic API, OAI streaming diffs)
#pragma once
#include "chat.h"
#include "nlohmann/json.hpp"
#include <string>
#include <vector>
#include <nlohmann/json.hpp>
using json = nlohmann::ordered_json;
@@ -16,9 +14,4 @@ json common_chat_convert_responses_to_chatcmpl(const json & body);
// Convert Anthropic Messages API format to OpenAI Chat Completions API format
json common_chat_convert_anthropic_to_oai(const json & body);
// DEPRECATED: only used in tests
json common_chat_msgs_to_json_oaicompat(const std::vector<common_chat_msg> & msgs, bool concat_typed_text = false);
json common_chat_tools_to_json_oaicompat(const std::vector<common_chat_tool> & tools);
json common_chat_msg_diff_to_json_oaicompat(const common_chat_msg_diff & diff);

View File

@@ -5,7 +5,6 @@
#include "mtmd.h"
#include "mtmd-helper.h"
#include "chat.h"
#include "chat-conversion.h"
#include "base64.hpp"
#include "server-common.h"

View File

@@ -1,4 +1,5 @@
#include "server-context.h"
#include "server-chat.h"
#include "server-common.h"
#include "server-http.h"
#include "server-task.h"

View File

@@ -1,5 +1,6 @@
#include "server-task.h"
#include "server-chat.h"
#include "chat.h"
#include "common.h"
#include "json-schema-to-grammar.h"