Mirror of https://github.com/mudler/LocalAI.git (synced 2025-05-20 02:24:59 +00:00)
embedding: do not use oai type
Signed-off-by: Ettore Di Giacinto <mudler@localai.io>
parent 1f536c5ed7
commit 3d397d8aab
1 changed file with 3 additions and 1 deletion
@@ -4261,9 +4261,11 @@ public:
     body["stream"] = false;
 
+    /*
     if (llama_pooling_type(ctx_server.ctx) == LLAMA_POOLING_TYPE_NONE) {
         return grpc::Status(grpc::StatusCode::INVALID_ARGUMENT, "Pooling type 'none' is not OAI compatible. Please use a different pooling type");
     }
+    */
 
     // for the shape of input/content, see tokenize_input_prompts()
     json prompt = body.at("prompt");
 
@@ -4291,7 +4293,7 @@ public:
         task.prompt_tokens = server_tokens(tokenized_prompts[i], ctx_server.mctx != nullptr);
 
         // OAI-compat
-        task.params.oaicompat = OAICOMPAT_TYPE_EMBEDDING;
+        task.params.oaicompat = OAICOMPAT_TYPE_NONE;
 
         tasks.push_back(std::move(task));
     }
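
For context only, not part of the commit above: setting task.params.oaicompat to OAICOMPAT_TYPE_NONE makes the backend return the raw embedding result instead of the OpenAI-style wrapper, which appears to be why the pooling-type guard could be commented out: the raw format can carry the per-token vectors that pooling type 'none' produces, while the OAI format expects a single vector per input. The sketch below only illustrates the two response shapes using nlohmann::json (already used in this file as `json`); the field names and envelope here are an assumption for illustration, not copied from the LocalAI or llama.cpp sources.

// Illustrative sketch only: compares an OpenAI-style embedding envelope with a
// raw per-token result. Field names are hypothetical, not taken from the repo.
#include <nlohmann/json.hpp>
#include <iostream>
#include <vector>

using json = nlohmann::json;

int main() {
    // With pooling enabled there is one vector per prompt; with pooling type
    // 'none' a prompt yields one vector per token, i.e. a list of vectors.
    std::vector<std::vector<float>> per_token = {{0.1f, 0.2f}, {0.3f, 0.4f}};

    // OAI-compatible shape (roughly what OAICOMPAT_TYPE_EMBEDDING implies):
    // a single flat vector per input inside the OpenAI "list" envelope.
    json oai;
    oai["object"] = "list";
    oai["data"] = json::array();
    oai["data"].push_back({{"object", "embedding"}, {"index", 0}, {"embedding", per_token[0]}});

    // Raw shape (what OAICOMPAT_TYPE_NONE leaves room for): the full list of
    // vectors is kept, so per-token output remains representable.
    json raw = json::array();
    raw.push_back({{"index", 0}, {"embedding", per_token}});

    std::cout << oai.dump(2) << "\n\n" << raw.dump(2) << std::endl;
    return 0;
}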