Add fish shell completion support (script generated with Claude Code)
This commit is contained in:
parent
d64c8104f0
commit
e77b49cd56
155
common/arg.cpp
155
common/arg.cpp
|
|
@ -1585,6 +1585,150 @@ static void common_params_print_completion(common_params_context & ctx_arg) {
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
|
// Print a source-able fish-shell completion script for all llama.cpp tools.
// For every known option a `complete -c <exe> ...` line is emitted per
// executable, followed by custom file-type completions (*.gguf, *.gbnf,
// *.jinja) for the options that take file paths.
static void common_params_print_completion_fish(common_params_context & ctx_arg) {
    std::vector<common_arg *> common_options;
    std::vector<common_arg *> sparam_options;
    std::vector<common_arg *> specific_options;

    // partition options: sampling params, example-specific, and common
    for (auto & opt : ctx_arg.options) {
        if (opt.is_sparam) {
            sparam_options.push_back(&opt);
        } else if (opt.in_example(ctx_arg.ex)) {
            specific_options.push_back(&opt);
        } else {
            common_options.push_back(&opt);
        }
    }

    // every executable the completions are registered for
    std::set<std::string> executables = {
        "llama-batched",
        "llama-batched-bench",
        "llama-bench",
        "llama-cli",
        "llama-convert-llama2c-to-ggml",
        "llama-cvector-generator",
        "llama-embedding",
        "llama-eval-callback",
        "llama-export-lora",
        "llama-gen-docs",
        "llama-gguf",
        "llama-gguf-hash",
        "llama-gguf-split",
        "llama-gritlm",
        "llama-imatrix",
        "llama-infill",
        "llama-mtmd-cli",
        "llama-llava-clip-quantize-cli",
        "llama-lookahead",
        "llama-lookup",
        "llama-lookup-create",
        "llama-lookup-merge",
        "llama-lookup-stats",
        "llama-parallel",
        "llama-passkey",
        "llama-perplexity",
        "llama-q8dot",
        "llama-quantize",
        "llama-qwen2vl-cli",
        "llama-retrieval",
        "llama-run",
        "llama-save-load-state",
        "llama-server",
        "llama-simple",
        "llama-simple-chat",
        "llama-speculative",
        "llama-speculative-simple",
        "llama-tokenize",
        "llama-tts",
        "llama-vdot"
    };

    // Escape a string for embedding in a single-quoted fish string.
    // Inside single quotes fish recognizes only \' and \\ as escapes, so a
    // newline cannot be backslash-escaped there; map it to a space instead
    // (the previous code emitted "\\" + '\n', which would produce a stray
    // backslash and split the quoted string across lines).
    auto escape_fish = [](const std::string & str) {
        std::string result;
        result.reserve(str.size());
        for (char c : str) {
            if (c == '\n') {
                result += ' ';
                continue;
            }
            if (c == '\'' || c == '\\') {
                result += '\\';
            }
            result += c;
        }
        return result;
    };

    // Emit one `complete` line per executable for a single option.
    auto print_fish_option = [&](const common_arg * opt) {
        std::string short_opt, long_opt;

        // split the option's spellings into short ("-x") and long ("--xxx")
        for (const char * arg : opt->args) {
            std::string arg_str(arg);
            if (arg_str.size() == 2 && arg_str[0] == '-' && arg_str[1] != '-') {
                short_opt = arg_str.substr(1);
            } else if (arg_str.size() > 2 && arg_str[0] == '-' && arg_str[1] == '-') {
                long_opt = arg_str.substr(2);
            }
        }

        // Extract description (first line only, remove env info)
        std::string desc = opt->help;
        size_t newline_pos = desc.find('\n');
        if (newline_pos != std::string::npos) {
            desc = desc.substr(0, newline_pos);
        }
        desc = escape_fish(desc);

        // Determine if option takes an argument
        bool has_value = opt->value_hint != nullptr || opt->handler_string != nullptr ||
                         opt->handler_int != nullptr || opt->handler_str_str != nullptr;

        for (const auto & exe : executables) {
            printf("complete -c %s", exe.c_str());

            if (!short_opt.empty()) {
                printf(" -s %s", short_opt.c_str());
            }
            if (!long_opt.empty()) {
                printf(" -l %s", long_opt.c_str());
            }
            if (!desc.empty()) {
                printf(" -d '%s'", desc.c_str());
            }
            if (has_value) {
                printf(" -r");
                // Add specific file completions
                if (long_opt == "model" || short_opt == "m") {
                    printf("F"); // disable default file completion, we'll add custom
                }
            } else {
                printf(" -f"); // no file completion for flags
            }
            printf("\n");
        }
    };

    // Print header comment
    printf("# Fish shell completions for llama.cpp\n");
    printf("# Generated by --completion-fish\n\n");

    // Print all options
    for (const common_arg * opt : common_options) {
        print_fish_option(opt);
    }
    for (const common_arg * opt : sparam_options) {
        print_fish_option(opt);
    }
    for (const common_arg * opt : specific_options) {
        print_fish_option(opt);
    }

    // Add custom file type completions
    for (const auto & exe : executables) {
        printf("\n# Custom file completions\n");
        printf("complete -c %s -s m -l model -rF -a '(for file in *.gguf; echo $file; end)'\n", exe.c_str());
        printf("complete -c %s -l grammar-file -rF -a '(for file in *.gbnf; echo $file; end)'\n", exe.c_str());
        printf("complete -c %s -l chat-template-file -rF -a '(for file in *.jinja; echo $file; end)'\n", exe.c_str());
    }
}
|
||||||
|
|
||||||
static std::vector<ggml_backend_dev_t> parse_device_list(const std::string & value) {
|
static std::vector<ggml_backend_dev_t> parse_device_list(const std::string & value) {
|
||||||
std::vector<ggml_backend_dev_t> devices;
|
std::vector<ggml_backend_dev_t> devices;
|
||||||
auto dev_names = string_split<std::string>(value, ',');
|
auto dev_names = string_split<std::string>(value, ',');
|
||||||
|
|
@ -1650,6 +1794,10 @@ bool common_params_parse(int argc, char ** argv, common_params & params, llama_e
|
||||||
common_params_print_completion(ctx_arg);
|
common_params_print_completion(ctx_arg);
|
||||||
exit(0);
|
exit(0);
|
||||||
}
|
}
|
||||||
|
if (ctx_arg.params.completion_fish) {
|
||||||
|
common_params_print_completion_fish(ctx_arg);
|
||||||
|
exit(0);
|
||||||
|
}
|
||||||
params.lr.init();
|
params.lr.init();
|
||||||
} catch (const std::invalid_argument & ex) {
|
} catch (const std::invalid_argument & ex) {
|
||||||
fprintf(stderr, "%s\n", ex.what());
|
fprintf(stderr, "%s\n", ex.what());
|
||||||
|
|
@ -1741,6 +1889,13 @@ common_params_context common_params_parser_init(common_params & params, llama_ex
|
||||||
params.completion = true;
|
params.completion = true;
|
||||||
}
|
}
|
||||||
));
|
));
|
||||||
|
add_opt(common_arg(
|
||||||
|
{"--completion-fish"},
|
||||||
|
"print source-able fish completion script for llama.cpp",
|
||||||
|
[](common_params & params) {
|
||||||
|
params.completion_fish = true;
|
||||||
|
}
|
||||||
|
));
|
||||||
add_opt(common_arg(
|
add_opt(common_arg(
|
||||||
{"--verbose-prompt"},
|
{"--verbose-prompt"},
|
||||||
string_format("print a verbose prompt before generation (default: %s)", params.verbose_prompt ? "true" : "false"),
|
string_format("print a verbose prompt before generation (default: %s)", params.verbose_prompt ? "true" : "false"),
|
||||||
|
|
|
||||||
|
|
@ -365,7 +365,8 @@ struct common_params {
|
||||||
bool kl_divergence = false; // compute KL divergence
|
bool kl_divergence = false; // compute KL divergence
|
||||||
|
|
||||||
bool usage = false; // print usage
|
bool usage = false; // print usage
|
||||||
bool completion = false; // print source-able completion script
|
bool completion = false; // print source-able bash completion script
|
||||||
|
bool completion_fish = false; // print source-able fish completion script
|
||||||
bool use_color = false; // use color to distinguish generations and inputs
|
bool use_color = false; // use color to distinguish generations and inputs
|
||||||
bool special = false; // enable special token output
|
bool special = false; // enable special token output
|
||||||
bool interactive = false; // interactive mode
|
bool interactive = false; // interactive mode
|
||||||
|
|
|
||||||
Loading…
Reference in New Issue