Mirror of https://github.com/mudler/LocalAI.git, synced 2025-05-28 14:35:00 +00:00
feat(vulkan): add vulkan support to the llama.cpp backend (#2648)
feat(vulkan): add vulkan support to llama.cpp

Signed-off-by: Ettore Di Giacinto <mudler@localai.io>
Parent: 03b1cf51fd
Commit: e84b31935c
4 changed files with 38 additions and 0 deletions
.github/workflows/image-pr.yml (vendored): 9 additions

@@ -128,3 +128,12 @@ jobs:
             runs-on: 'ubuntu-latest'
             base-image: "ubuntu:22.04"
             makeflags: "--jobs=4 --output-sync=target"
+          - build-type: 'vulkan'
+            platforms: 'linux/amd64'
+            tag-latest: 'false'
+            tag-suffix: '-vulkan-ffmpeg-core'
+            ffmpeg: 'true'
+            image-type: 'core'
+            runs-on: 'ubuntu-latest'
+            base-image: "ubuntu:22.04"
+            makeflags: "--jobs=4 --output-sync=target"
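For orientation, here is a minimal sketch of where the new entry sits in this workflow's build matrix. The job name and the strategy/matrix/include nesting are assumptions about the surrounding file layout; only the entry's keys and values come from the hunk above.

# Sketch only: the job name and nesting are assumed, not taken from the diff.
jobs:
  image-build:
    strategy:
      matrix:
        include:
          # ...existing build-type entries...
          - build-type: 'vulkan'
            platforms: 'linux/amd64'
            tag-latest: 'false'
            tag-suffix: '-vulkan-ffmpeg-core'
            ffmpeg: 'true'
            image-type: 'core'
            runs-on: 'ubuntu-latest'
            base-image: "ubuntu:22.04"
            makeflags: "--jobs=4 --output-sync=target"

Each matrix entry drives one container image build, so this adds a Vulkan-enabled core image (with ffmpeg) to the PR image builds without touching the existing variants.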
.github/workflows/image.yml (vendored): 9 additions

@@ -315,3 +315,12 @@ jobs:
             runs-on: 'arc-runner-set'
             base-image: "ubuntu:22.04"
             makeflags: "--jobs=4 --output-sync=target"
+          - build-type: 'vulkan'
+            platforms: 'linux/amd64,linux/arm64'
+            tag-latest: 'false'
+            tag-suffix: '-vulkan-ffmpeg-core'
+            ffmpeg: 'true'
+            image-type: 'core'
+            runs-on: 'arc-runner-set'
+            base-image: "ubuntu:22.04"
+            makeflags: "--jobs=4 --output-sync=target"
(Diffs for the remaining two changed files are not shown.)
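The '-vulkan-ffmpeg-core' tag suffix means these entries publish a new image flavor. As a purely illustrative usage sketch, assuming the image ends up on Docker Hub as localai/localai with a master-vulkan-ffmpeg-core tag (the image name, tag, mount path, and device mapping below are assumptions, not part of this commit), it could be run with a compose file that passes the host GPU through to the container:

# Hypothetical compose file; image name/tag, paths, and device mapping are assumptions.
services:
  local-ai:
    image: localai/localai:master-vulkan-ffmpeg-core   # assumed tag derived from the '-vulkan-ffmpeg-core' suffix
    ports:
      - "8080:8080"            # LocalAI's default API port
    devices:
      - /dev/dri:/dev/dri      # expose the GPU render node so Vulkan can find a device
    volumes:
      - ./models:/models       # model directory mounted into the container (path is illustrative)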