mirror of
https://github.com/mudler/LocalAI.git
synced 2025-05-24 12:35:00 +00:00
fix(llama): resolve lora adapters correctly from the model file (#964)
**Description** We were otherwise expecting absolute paths; this makes the lora adapter path relative to the model file (as someone would expect). **Notes for Reviewers** **[Signed commits](../CONTRIBUTING.md#signing-off-on-commits-developer-certificate-of-origin)** - [ ] Yes, I signed my commits. <!-- Thank you for contributing to LocalAI! Contributing Conventions: 1. Include descriptive PR titles with [<component-name>] prepended. 2. Build and test your changes before submitting a PR. 3. Sign your commits By following the community's contribution conventions upfront, the review process will be accelerated and your PR merged more quickly. -->
This commit is contained in:
parent
02704e38d3
commit
3bab307904
1 changed file with 6 additions and 2 deletions
|
@ -4,6 +4,7 @@ package llama
|
||||||
// It is meant to be used by the main executable that is the server for the specific backend type (falcon, gpt3, etc)
|
// It is meant to be used by the main executable that is the server for the specific backend type (falcon, gpt3, etc)
|
||||||
import (
|
import (
|
||||||
"fmt"
|
"fmt"
|
||||||
|
"path/filepath"
|
||||||
|
|
||||||
"github.com/go-skynet/LocalAI/pkg/grpc/base"
|
"github.com/go-skynet/LocalAI/pkg/grpc/base"
|
||||||
pb "github.com/go-skynet/LocalAI/pkg/grpc/proto"
|
pb "github.com/go-skynet/LocalAI/pkg/grpc/proto"
|
||||||
|
@ -36,12 +37,15 @@ func (llm *LLM) Load(opts *pb.ModelOptions) error {
|
||||||
llamaOpts = append(llamaOpts, llama.SetMulMatQ(false))
|
llamaOpts = append(llamaOpts, llama.SetMulMatQ(false))
|
||||||
}
|
}
|
||||||
|
|
||||||
|
// Get base path of opts.ModelFile and use the same for lora (assume the same path)
|
||||||
|
basePath := filepath.Dir(opts.ModelFile)
|
||||||
|
|
||||||
if opts.LoraAdapter != "" {
|
if opts.LoraAdapter != "" {
|
||||||
llamaOpts = append(llamaOpts, llama.SetLoraAdapter(opts.LoraAdapter))
|
llamaOpts = append(llamaOpts, llama.SetLoraAdapter(filepath.Join(basePath, opts.LoraAdapter)))
|
||||||
}
|
}
|
||||||
|
|
||||||
if opts.LoraBase != "" {
|
if opts.LoraBase != "" {
|
||||||
llamaOpts = append(llamaOpts, llama.SetLoraBase(opts.LoraBase))
|
llamaOpts = append(llamaOpts, llama.SetLoraBase(filepath.Join(basePath, opts.LoraBase)))
|
||||||
}
|
}
|
||||||
|
|
||||||
if opts.ContextSize != 0 {
|
if opts.ContextSize != 0 {
|
||||||
|
|
Loading…
Add table
Add a link
Reference in a new issue