feat(tts): add ElevenLabs and OpenAI TTS compatibility layer (#1834)

* feat(elevenlabs): map ElevenLabs API support to TTS

This allows ElevenLabs clients to work automatically with LocalAI by
supporting the ElevenLabs API.

The ElevenLabs server endpoint is implemented such that it is wired to the
TTS endpoints.

Fixes: https://github.com/mudler/LocalAI/issues/1809

* feat(openai/tts): compat layer with openai tts

Fixes: #1276

* fix: adapt tts CLI
This commit is contained in:
Ettore Di Giacinto 2024-03-14 23:08:34 +01:00 committed by GitHub
parent 45d520f913
commit 20136ca8b7
No known key found for this signature in database
GPG key ID: B5690EEEBB952194
24 changed files with 454 additions and 338 deletions

10
main.go
View file

@ -50,7 +50,7 @@ func main() {
app := &cli.App{
Name: "LocalAI",
Version: internal.PrintableVersion(),
Usage: "OpenAI compatible API for running LLaMA/GPT models locally on CPU with consumer grade hardware.",
Usage: "OpenAI, OSS alternative. Drop-in compatible API for running LLM, GPT and genAI models locally on CPU, GPUs with consumer grade hardware. Supported server endpoints: OpenAI, Elevenlabs",
Flags: []cli.Flag{
&cli.BoolFlag{
Name: "f16",
@ -394,6 +394,12 @@ For a list of compatible model, check out: https://localai.io/model-compatibilit
Usage: "Model name to run the TTS",
Required: true,
},
&cli.StringFlag{
Name: "voice",
Aliases: []string{"v"},
Usage: "Voice name to run the TTS (optional)",
Required: true,
},
&cli.StringFlag{
Name: "output-file",
Aliases: []string{"o"},
@ -427,7 +433,7 @@ For a list of compatible model, check out: https://localai.io/model-compatibilit
defer ml.StopAllGRPC()
filePath, _, err := backend.ModelTTS(backendOption, text, modelOption, ml, opts, config.BackendConfig{})
filePath, _, err := backend.ModelTTS(backendOption, text, modelOption, ctx.String("voice"), ml, opts, config.BackendConfig{})
if err != nil {
return err
}