# llama.cpp/examples/openai/llama_cpp_server_api.py
from typing import Optional
from pydantic import Json
from examples.openai.api import LlamaCppParams
class LlamaCppServerCompletionRequest(LlamaCppParams):
    """Request body for the llama.cpp server's completion endpoint.

    Extends ``LlamaCppParams`` (sampling/runtime parameters defined elsewhere)
    with the fields specific to a single completion call. All optional fields
    default to ``None`` so they are omitted unless explicitly set.
    """

    # Text prompt to complete; the only required field.
    prompt: str
    # When true, the server streams partial results; None leaves the
    # server default in effect. (Presumably SSE streaming — confirm
    # against the llama.cpp server docs.)
    stream: Optional[bool] = None
    # When true, asks the server to reuse its cached prompt prefix
    # between calls; None leaves the server default in effect.
    cache_prompt: Optional[bool] = None
    # GBNF grammar string constraining generated output, if any.
    grammar: Optional[str] = None
    # JSON schema constraining output; pydantic's ``Json`` type parses
    # the value from a JSON string on validation.
    json_schema: Optional[Json] = None