Revert "feat: add new gpt4all-j binding (#142)"

This reverts commit 92452d46da.
Author: mudler
Date:   2023-05-02 15:32:43 +02:00
Commit: a46ffe9a36
Parent: 5378bfd86d

4 changed files with 11 additions and 12 deletions


@@ -5,7 +5,7 @@ BINARY_NAME=local-ai
 # renovate: datasource=github-tags depName=go-skynet/go-llama.cpp
 GOLLAMA_VERSION?=llama.cpp-f4cef87
 # renovate: datasource=git-refs packageNameTemplate=https://github.com/go-skynet/go-gpt4all-j.cpp currentValueTemplate=master depNameTemplate=go-gpt4all-j.cpp
-GOGPT4ALLJ_VERSION?=d57834a2d24e8be64c78b9496a870d18393066fd
+GOGPT4ALLJ_VERSION?=1f7bff57f66cb7062e40d0ac3abd2217815e5109
 # renovate: datasource=git-refs packageNameTemplate=https://github.com/go-skynet/go-gpt2.cpp currentValueTemplate=master depNameTemplate=go-gpt2.cpp
 GOGPT2_VERSION?=245a5bfe6708ab80dc5c733dcdbfbe3cfd2acdaa
@@ -56,15 +56,9 @@ go-gpt4all-j:
 	@find ./go-gpt4all-j -type f -name "*.cpp" -exec sed -i'' -e 's/json_/json_gptj_/g' {} +
 	@find ./go-gpt4all-j -type f -name "*.cpp" -exec sed -i'' -e 's/void replace/void json_gptj_replace/g' {} +
 	@find ./go-gpt4all-j -type f -name "*.cpp" -exec sed -i'' -e 's/::replace/::json_gptj_replace/g' {} +
-	@find ./go-gpt4all-j -type f -name "*.cpp" -exec sed -i'' -e 's/llama_/gptj_llama_/g' {} +
-	@find ./go-gpt4all-j -type f -name "*.h" -exec sed -i'' -e 's/llama_/gptj_llama_/g' {} +
-	@find ./go-gpt4all-j -type f -name "*" -exec sed -i'' -e 's/set_console_color/gptj_set_console_color/g' {} +
-	@find ./go-gpt4all-j -type f -name "*.txt" -exec sed -i'' -e 's/llama_/gptj_llama_/g' {} +
-	@find ./go-gpt4all-j -type f -name "Makefile" -exec sed -i'' -e 's/llama_/gptj_llama_/g' {} +
-	@mv ./go-gpt4all-j/gpt4all-j/llmodel/llama.cpp/llama_util.h ./go-gpt4all-j/gpt4all-j/llmodel/llama.cpp/gptj_llama_util.h

 go-gpt4all-j/libgptj.a: go-gpt4all-j
-	$(MAKE) -C go-gpt4all-j $(GENERIC_PREFIX)libgptj.a example
+	$(MAKE) -C go-gpt4all-j $(GENERIC_PREFIX)libgptj.a

 # CEREBRAS GPT
 go-gpt2:


@@ -37,7 +37,7 @@ func ModelInference(s string, loader *model.ModelLoader, c Config) (func() (stri
 		// TODO: this is ugly, better identifying the model somehow! however, it is a good stab for a first implementation..
 		model, llamaerr = loader.LoadLLaMAModel(modelFile, llamaOpts...)
 		if llamaerr != nil {
-			gptModel, gptjerr = loader.LoadGPTJModel(modelFile, gptj.SetThreads(c.Threads))
+			gptModel, gptjerr = loader.LoadGPTJModel(modelFile)
 			if gptjerr != nil {
 				gpt2Model, gpt2err = loader.LoadGPT2Model(modelFile)
 				if gpt2err != nil {
@@ -108,12 +108,17 @@ func ModelInference(s string, loader *model.ModelLoader, c Config) (func() (stri
 			gptj.SetTopP(c.TopP),
 			gptj.SetTopK(c.TopK),
 			gptj.SetTokens(c.Maxtokens),
+			gptj.SetThreads(c.Threads),
 		}

 		if c.Batch != 0 {
 			predictOptions = append(predictOptions, gptj.SetBatch(c.Batch))
 		}

+		if c.Seed != 0 {
+			predictOptions = append(predictOptions, gptj.SetSeed(c.Seed))
+		}
+
 		return gptModel.Predict(
 			s,
 			predictOptions...,
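
For context, after this revert the GPT-J predict path assembles its options roughly as in the sketch below. This is a hedged reconstruction, not the project's exact source: the Config struct only mirrors the fields referenced in the hunk above, its field types are assumptions, and the PredictOption type name is taken from the go-gpt4all-j.cpp binding's conventions.

// Minimal sketch of the post-revert GPT-J predict path (assumed types).
package example

import (
	gptj "github.com/go-skynet/go-gpt4all-j.cpp"
)

// Config is a stand-in for the API config; it only carries the fields the
// hunk above references, and the field types are assumptions.
type Config struct {
	TopP      float64
	TopK      int
	Maxtokens int
	Threads   int
	Batch     int
	Seed      int
}

// predictGPTJ builds the predict options the same way the reverted code does
// (threads and seed are per-prediction options again) and runs one completion.
func predictGPTJ(gptModel *gptj.GPTJ, s string, c Config) (string, error) {
	predictOptions := []gptj.PredictOption{
		gptj.SetTopP(c.TopP),
		gptj.SetTopK(c.TopK),
		gptj.SetTokens(c.Maxtokens),
		gptj.SetThreads(c.Threads),
	}
	if c.Batch != 0 {
		predictOptions = append(predictOptions, gptj.SetBatch(c.Batch))
	}
	if c.Seed != 0 {
		predictOptions = append(predictOptions, gptj.SetSeed(c.Seed))
	}
	return gptModel.Predict(s, predictOptions...)
}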

go.mod

@@ -4,7 +4,7 @@ go 1.19
 require (
 	github.com/go-skynet/go-gpt2.cpp v0.0.0-20230422085954-245a5bfe6708
-	github.com/go-skynet/go-gpt4all-j.cpp v0.0.0-20230501160437-8417608f0e94
+	github.com/go-skynet/go-gpt4all-j.cpp v0.0.0-20230422090028-1f7bff57f66c
 	github.com/go-skynet/go-llama.cpp v0.0.0-20230430075552-377fd245eae2
 	github.com/gofiber/fiber/v2 v2.44.0
 	github.com/jaypipes/ghw v0.10.0


@@ -193,7 +193,7 @@ func (ml *ModelLoader) LoadGPT2Model(modelName string) (*gpt2.GPT2, error) {
 	return model, err
 }

-func (ml *ModelLoader) LoadGPTJModel(modelName string, opts ...gptj.ModelOption) (*gptj.GPTJ, error) {
+func (ml *ModelLoader) LoadGPTJModel(modelName string) (*gptj.GPTJ, error) {
 	ml.mu.Lock()
 	defer ml.mu.Unlock()
@@ -222,7 +222,7 @@ func (ml *ModelLoader) LoadGPTJModel(modelName string, opts ...gptj.ModelOption)
 	modelFile := filepath.Join(ml.ModelPath, modelName)
 	log.Debug().Msgf("Loading model in memory from file: %s", modelFile)

-	model, err := gptj.New(modelFile, opts...)
+	model, err := gptj.New(modelFile)
 	if err != nil {
 		return nil, err
 	}
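
Taken together, a hypothetical caller of the reverted API looks like the sketch below: the model is loaded with gptj.New and only the file path, and threads are supplied per prediction. The model path, prompt, and option values are placeholders, not anything taken from this commit.

// Hypothetical end-to-end use of the reverted go-gpt4all-j.cpp API.
package main

import (
	"fmt"
	"log"

	gptj "github.com/go-skynet/go-gpt4all-j.cpp"
)

func main() {
	// After the revert, New takes only the model file path (no ModelOption arguments).
	model, err := gptj.New("models/ggml-gpt4all-j.bin") // placeholder path
	if err != nil {
		log.Fatalf("loading model: %v", err)
	}

	// Threads (and seed) are passed per prediction again.
	out, err := model.Predict("The capital of France is",
		gptj.SetThreads(4),
		gptj.SetTokens(64),
	)
	if err != nil {
		log.Fatalf("predict: %v", err)
	}
	fmt.Println(out)
}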