Mirror of https://github.com/mudler/LocalAI.git (synced 2026-01-01 07:01:09 -06:00)
* chore: allow to install with pip
  Signed-off-by: Ettore Di Giacinto <mudler@localai.io>
* WIP
  Signed-off-by: Ettore Di Giacinto <mudler@localai.io>
* Make the backend to build and actually work
  Signed-off-by: Ettore Di Giacinto <mudler@localai.io>
* List models from system only
  Signed-off-by: Ettore Di Giacinto <mudler@localai.io>
* Add script to build darwin python backends
  Signed-off-by: Ettore Di Giacinto <mudler@localai.io>
* Run protogen in libbackend
  Signed-off-by: Ettore Di Giacinto <mudler@localai.io>
* Detect if mps is available across python backends
  Signed-off-by: Ettore Di Giacinto <mudler@localai.io>
* CI: try to build backend
  Signed-off-by: Ettore Di Giacinto <mudler@localai.io>
* Debug CI
  Signed-off-by: Ettore Di Giacinto <mudler@localai.io>
* Fixups
  Signed-off-by: Ettore Di Giacinto <mudler@localai.io>
* Fixups
  Signed-off-by: Ettore Di Giacinto <mudler@localai.io>
* Index mlx-vlm
  Signed-off-by: Ettore Di Giacinto <mudler@localai.io>
* Remove mlx-vlm
  Signed-off-by: Ettore Di Giacinto <mudler@localai.io>
* Drop CI test
  Signed-off-by: Ettore Di Giacinto <mudler@localai.io>
---------
Signed-off-by: Ettore Di Giacinto <mudler@localai.io>
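One of the commits above adds MPS (Apple Metal Performance Shaders) detection across the python backends, but that code is not shown on this page. The following is only an illustrative sketch of how such a probe can be done from a shell wrapper; the detect_device function and the torch one-liner are assumptions, not the repository's actual common/libbackend.sh logic.

# Hypothetical sketch of MPS detection from a shell wrapper; the function
# name and the torch one-liner are illustrative assumptions, not the
# repository's actual implementation.
detect_device() {
    if python3 -c 'import torch, sys; sys.exit(0 if torch.backends.mps.is_available() else 1)' 2>/dev/null; then
        echo "mps"
    else
        echo "cpu"
    fi
}

DEVICE="$(detect_device)"
echo "selected torch device: ${DEVICE}"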
21 lines · 472 B · Bash
#!/bin/bash
set -ex

# Name of the OCI image to produce; can be overridden from the environment.
IMAGE_NAME="${IMAGE_NAME:-localai/llama-cpp-darwin}"

# Directory where the packaged backend image tarball will be written.
mkdir -p backend-images

# Build the selected python backend; BACKEND must name a directory
# under backend/python/ and is expected to be set by the caller.
make -C backend/python/${BACKEND}

# Bundle the shared python helpers alongside the backend before packaging.
cp -rfv backend/python/common backend/python/${BACKEND}/

# Target platform for the image; defaults to Apple Silicon.
PLATFORMARCH="${PLATFORMARCH:-darwin/arm64}"

# Package the built backend directory as an OCI image tarball.
./local-ai util create-oci-image \
    backend/python/${BACKEND}/. \
    --output ./backend-images/${BACKEND}.tar \
    --image-name $IMAGE_NAME \
    --platform $PLATFORMARCH

# Remove build artifacts from the backend directory.
make -C backend/python/${BACKEND} clean
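A minimal invocation sketch follows, assuming the script is run from the repository root with a built ./local-ai binary present. The script path and the "mlx" backend and image names are assumptions for illustration; only the BACKEND, IMAGE_NAME, and PLATFORMARCH variables come from the script itself.

# Hypothetical invocation; the script path and the "mlx" backend name are
# assumptions. BACKEND must match a directory under backend/python/.
BACKEND=mlx \
IMAGE_NAME=localai/mlx-darwin \
PLATFORMARCH=darwin/arm64 \
    bash ./scripts/build-darwin-python-backend.sh
# On success, the packaged backend is written to ./backend-images/mlx.tar.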