Mirror of https://github.com/mudler/LocalAI.git (synced 2025-05-20 10:35:01 +00:00)
* feat: migrate to alecthomas/kong for CLI
* feat: bring in new flag for granular log levels
* chore: go mod tidy
* feat: allow loading cli flag values from ["./localai.yaml", "~/.config/localai.yaml", "/etc/localai.yaml"] in that order
* feat: load from .env file instead of a yaml file
* feat: better loading for environment files
* feat(doc): add initial documentation about configuration
* fix: remove test log lines
* feat: integrate new documentation into existing pages
* feat: add documentation on .env files
* fix: cleanup some documentation table errors
* feat: refactor CLI logic out to its own package under core/cli

Signed-off-by: Chris Jowett <421501+cryptk@users.noreply.github.com>
parent 92005b9c02
commit 24d7dadfed
10 changed files with 552 additions and 623 deletions
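The commit message above describes two mechanisms that this file's diff does not show: parsing the command tree with alecthomas/kong and seeding flag values from a .env file before parsing. Below is a minimal sketch of how that wiring could look, assuming github.com/joho/godotenv for env-file loading; the file search order, the local-ai binary name, and the zero-value Context binding are illustrative guesses, not code from this commit.

// Sketch only, not part of this diff: the real entrypoint is defined
// elsewhere in this changeset.
package main

import (
	"os"

	"github.com/alecthomas/kong"
	"github.com/go-skynet/LocalAI/core/cli"
	"github.com/joho/godotenv"
)

// CLI is a stand-in command tree; the real one carries more subcommands.
var CLI struct {
	Transcript cli.TranscriptCMD `cmd:"" help:"Convert audio to text"`
}

func main() {
	// godotenv.Load only sets variables that are not already present, so the
	// effective precedence is: explicit flag > process environment > .env file
	// > struct-tag default.
	for _, f := range []string{".env", "localai.env"} { // hypothetical locations
		if _, err := os.Stat(f); err == nil {
			_ = godotenv.Load(f)
			break
		}
	}

	kctx := kong.Parse(&CLI,
		kong.Name("local-ai"),
		// Supplies the ${basepath} variable used by defaults such as
		// "${basepath}/models" in TranscriptCMD.
		kong.Vars{"basepath": "."},
	)
	// Bind a *cli.Context so subcommand Run(ctx *cli.Context) methods receive it.
	kctx.FatalIfErrorf(kctx.Run(&cli.Context{}))
}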
core/cli/transcript.go (new file, 54 lines)
@@ -0,0 +1,54 @@
package cli

import (
	"context"
	"errors"
	"fmt"

	"github.com/go-skynet/LocalAI/core/backend"
	"github.com/go-skynet/LocalAI/core/config"
	"github.com/go-skynet/LocalAI/pkg/model"
)

type TranscriptCMD struct {
	Filename string `arg:""`

	Backend           string `short:"b" default:"whisper" help:"Backend to run the transcription model"`
	Model             string `short:"m" required:"" help:"Model name to use for the transcription"`
	Language          string `short:"l" help:"Language of the audio file"`
	Threads           int    `short:"t" default:"1" help:"Number of threads used for parallel computation"`
	ModelsPath        string `env:"LOCALAI_MODELS_PATH,MODELS_PATH" type:"path" default:"${basepath}/models" help:"Path containing models used for inferencing" group:"storage"`
	BackendAssetsPath string `env:"LOCALAI_BACKEND_ASSETS_PATH,BACKEND_ASSETS_PATH" type:"path" default:"/tmp/localai/backend_data" help:"Path used to extract libraries that are required by some of the backends in runtime" group:"storage"`
}

func (t *TranscriptCMD) Run(ctx *Context) error {
	opts := &config.ApplicationConfig{
		ModelPath:         t.ModelsPath,
		Context:           context.Background(),
		AssetsDestination: t.BackendAssetsPath,
	}

	cl := config.NewBackendConfigLoader()
	ml := model.NewModelLoader(opts.ModelPath)
	if err := cl.LoadBackendConfigsFromPath(t.ModelsPath); err != nil {
		return err
	}

	c, exists := cl.GetBackendConfig(t.Model)
	if !exists {
		return errors.New("model not found")
	}

	c.Threads = &t.Threads

	defer ml.StopAllGRPC()

	tr, err := backend.ModelTranscription(t.Filename, t.Language, ml, c, opts)
	if err != nil {
		return err
	}
	for _, segment := range tr.Segments {
		fmt.Println(segment.Start.String(), "-", segment.Text)
	}
	return nil
}
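Going by the kong tags above, an invocation of the new subcommand might look like the following; the local-ai binary name and the transcript command wiring are not part of this diff, and the model name is a placeholder:

LOCALAI_MODELS_PATH=./models local-ai transcript --model whisper-1 --language en --threads 4 audio.wav

The audio filename is the positional argument, --model (-m) is the only required flag, --backend (-b) defaults to whisper, and the models path can be supplied through either the LOCALAI_MODELS_PATH or MODELS_PATH environment variable instead of the default ${basepath}/models.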