fix: rename Dockerfile.go --> Dockerfile.golang to avoid IDE errors (#5892)

Extracts the Dockerfile.go --> Dockerfile.golang rename out into its own change. The .go suffix causes editors and IDEs to treat the file as Go source, producing spurious syntax highlighting and IDE errors; the .golang suffix avoids that.

Signed-off-by: Dave Lee <dave@gray101.com>
Dave
2025-07-23 15:33:26 -04:00
committed by GitHub
parent b7b3164736
commit 9cecf5e7ac
4 changed files with 28 additions and 27 deletions


@@ -597,7 +597,7 @@ jobs:
base-image: "ubuntu:22.04"
skip-drivers: 'false'
backend: "piper"
dockerfile: "./backend/Dockerfile.go"
dockerfile: "./backend/Dockerfile.golang"
context: "./"
# bark-cpp
- build-type: ''
@@ -610,7 +610,7 @@ jobs:
base-image: "ubuntu:22.04"
skip-drivers: 'false'
backend: "bark-cpp"
dockerfile: "./backend/Dockerfile.go"
dockerfile: "./backend/Dockerfile.golang"
context: "./"
- build-type: ''
cuda-major-version: ""
@@ -659,7 +659,7 @@ jobs:
base-image: "ubuntu:22.04"
skip-drivers: 'false'
backend: "stablediffusion-ggml"
dockerfile: "./backend/Dockerfile.go"
dockerfile: "./backend/Dockerfile.golang"
context: "./"
- build-type: 'cublas'
cuda-major-version: "12"
@@ -671,7 +671,7 @@ jobs:
base-image: "ubuntu:22.04"
skip-drivers: 'false'
backend: "stablediffusion-ggml"
dockerfile: "./backend/Dockerfile.go"
dockerfile: "./backend/Dockerfile.golang"
context: "./"
- build-type: 'cublas'
cuda-major-version: "11"
@@ -683,7 +683,7 @@ jobs:
base-image: "ubuntu:22.04"
skip-drivers: 'false'
backend: "stablediffusion-ggml"
dockerfile: "./backend/Dockerfile.go"
dockerfile: "./backend/Dockerfile.golang"
context: "./"
- build-type: 'sycl_f32'
cuda-major-version: ""
@@ -695,7 +695,7 @@ jobs:
base-image: "quay.io/go-skynet/intel-oneapi-base:latest"
skip-drivers: 'false'
backend: "stablediffusion-ggml"
dockerfile: "./backend/Dockerfile.go"
dockerfile: "./backend/Dockerfile.golang"
context: "./"
- build-type: 'sycl_f16'
cuda-major-version: ""
@@ -707,7 +707,7 @@ jobs:
base-image: "quay.io/go-skynet/intel-oneapi-base:latest"
skip-drivers: 'false'
backend: "stablediffusion-ggml"
dockerfile: "./backend/Dockerfile.go"
dockerfile: "./backend/Dockerfile.golang"
context: "./"
- build-type: 'vulkan'
cuda-major-version: ""
@@ -719,7 +719,7 @@ jobs:
base-image: "ubuntu:22.04"
skip-drivers: 'false'
backend: "stablediffusion-ggml"
dockerfile: "./backend/Dockerfile.go"
dockerfile: "./backend/Dockerfile.golang"
context: "./"
- build-type: 'cublas'
cuda-major-version: "12"
@@ -731,7 +731,7 @@ jobs:
base-image: "nvcr.io/nvidia/l4t-jetpack:r36.4.0"
runs-on: 'ubuntu-24.04-arm'
backend: "stablediffusion-ggml"
dockerfile: "./backend/Dockerfile.go"
dockerfile: "./backend/Dockerfile.golang"
context: "./"
# whisper
- build-type: ''
@@ -744,7 +744,7 @@ jobs:
base-image: "ubuntu:22.04"
skip-drivers: 'false'
backend: "whisper"
dockerfile: "./backend/Dockerfile.go"
dockerfile: "./backend/Dockerfile.golang"
context: "./"
- build-type: 'cublas'
cuda-major-version: "12"
@@ -756,7 +756,7 @@ jobs:
base-image: "ubuntu:22.04"
skip-drivers: 'false'
backend: "whisper"
dockerfile: "./backend/Dockerfile.go"
dockerfile: "./backend/Dockerfile.golang"
context: "./"
- build-type: 'cublas'
cuda-major-version: "11"
@@ -768,7 +768,7 @@ jobs:
base-image: "ubuntu:22.04"
skip-drivers: 'false'
backend: "whisper"
dockerfile: "./backend/Dockerfile.go"
dockerfile: "./backend/Dockerfile.golang"
context: "./"
- build-type: 'sycl_f32'
cuda-major-version: ""
@@ -780,7 +780,7 @@ jobs:
base-image: "quay.io/go-skynet/intel-oneapi-base:latest"
skip-drivers: 'false'
backend: "whisper"
dockerfile: "./backend/Dockerfile.go"
dockerfile: "./backend/Dockerfile.golang"
context: "./"
- build-type: 'sycl_f16'
cuda-major-version: ""
@@ -792,7 +792,7 @@ jobs:
base-image: "quay.io/go-skynet/intel-oneapi-base:latest"
skip-drivers: 'false'
backend: "whisper"
dockerfile: "./backend/Dockerfile.go"
dockerfile: "./backend/Dockerfile.golang"
context: "./"
- build-type: 'vulkan'
cuda-major-version: ""
@@ -804,7 +804,7 @@ jobs:
base-image: "ubuntu:22.04"
skip-drivers: 'false'
backend: "whisper"
dockerfile: "./backend/Dockerfile.go"
dockerfile: "./backend/Dockerfile.golang"
context: "./"
- build-type: 'cublas'
cuda-major-version: "12"
@@ -816,7 +816,7 @@ jobs:
base-image: "nvcr.io/nvidia/l4t-jetpack:r36.4.0"
runs-on: 'ubuntu-24.04-arm'
backend: "whisper"
dockerfile: "./backend/Dockerfile.go"
dockerfile: "./backend/Dockerfile.golang"
context: "./"
- build-type: 'hipblas'
cuda-major-version: ""
@@ -828,7 +828,7 @@ jobs:
runs-on: 'ubuntu-latest'
skip-drivers: 'false'
backend: "whisper"
dockerfile: "./backend/Dockerfile.go"
dockerfile: "./backend/Dockerfile.golang"
context: "./"
#silero-vad
- build-type: ''
@@ -841,7 +841,7 @@ jobs:
base-image: "ubuntu:22.04"
skip-drivers: 'false'
backend: "silero-vad"
dockerfile: "./backend/Dockerfile.go"
dockerfile: "./backend/Dockerfile.golang"
context: "./"
# local-store
- build-type: ''
@@ -854,7 +854,7 @@ jobs:
base-image: "ubuntu:22.04"
skip-drivers: 'false'
backend: "local-store"
dockerfile: "./backend/Dockerfile.go"
dockerfile: "./backend/Dockerfile.golang"
context: "./"
# huggingface
- build-type: ''
@@ -867,7 +867,7 @@ jobs:
base-image: "ubuntu:22.04"
skip-drivers: 'false'
backend: "huggingface"
dockerfile: "./backend/Dockerfile.go"
dockerfile: "./backend/Dockerfile.golang"
context: "./"
llama-cpp-darwin:
runs-on: macOS-14


@@ -452,16 +452,16 @@ docker-build-llama-cpp:
docker build --build-arg BUILD_TYPE=$(BUILD_TYPE) --build-arg IMAGE_BASE=$(IMAGE_BASE) -t local-ai-backend:llama-cpp -f backend/Dockerfile.llama-cpp .
docker-build-bark-cpp:
docker build -t local-ai-backend:bark-cpp -f backend/Dockerfile.go --build-arg BACKEND=bark-cpp .
docker build -t local-ai-backend:bark-cpp -f backend/Dockerfile.golang --build-arg BACKEND=bark-cpp .
docker-build-piper:
docker build -t local-ai-backend:piper -f backend/Dockerfile.go --build-arg BACKEND=piper .
docker build -t local-ai-backend:piper -f backend/Dockerfile.golang --build-arg BACKEND=piper .
docker-build-local-store:
docker build -t local-ai-backend:local-store -f backend/Dockerfile.go --build-arg BACKEND=local-store .
docker build -t local-ai-backend:local-store -f backend/Dockerfile.golang --build-arg BACKEND=local-store .
docker-build-huggingface:
docker build -t local-ai-backend:huggingface -f backend/Dockerfile.go --build-arg BACKEND=huggingface .
docker build -t local-ai-backend:huggingface -f backend/Dockerfile.golang --build-arg BACKEND=huggingface .
docker-save-huggingface: backend-images
docker save local-ai-backend:huggingface -o backend-images/huggingface.tar
@@ -470,7 +470,7 @@ docker-save-local-store: backend-images
docker save local-ai-backend:local-store -o backend-images/local-store.tar
docker-build-silero-vad:
docker build -t local-ai-backend:silero-vad -f backend/Dockerfile.go --build-arg BACKEND=silero-vad .
docker build -t local-ai-backend:silero-vad -f backend/Dockerfile.golang --build-arg BACKEND=silero-vad .
docker-save-silero-vad: backend-images
docker save local-ai-backend:silero-vad -o backend-images/silero-vad.tar
@@ -485,7 +485,7 @@ docker-save-bark-cpp: backend-images
docker save local-ai-backend:bark-cpp -o backend-images/bark-cpp.tar
docker-build-stablediffusion-ggml:
docker build -t local-ai-backend:stablediffusion-ggml -f backend/Dockerfile.go --build-arg BACKEND=stablediffusion-ggml .
docker build -t local-ai-backend:stablediffusion-ggml -f backend/Dockerfile.golang --build-arg BACKEND=stablediffusion-ggml .
docker-save-stablediffusion-ggml: backend-images
docker save local-ai-backend:stablediffusion-ggml -o backend-images/stablediffusion-ggml.tar
@@ -506,7 +506,7 @@ docker-build-kokoro:
docker build -t local-ai-backend:kokoro -f backend/Dockerfile.python --build-arg BACKEND=kokoro .
docker-build-whisper:
docker build --build-arg BUILD_TYPE=$(BUILD_TYPE) --build-arg BASE_IMAGE=$(BASE_IMAGE) -t local-ai-backend:whisper -f backend/Dockerfile.go --build-arg BACKEND=whisper .
docker build --build-arg BUILD_TYPE=$(BUILD_TYPE) --build-arg BASE_IMAGE=$(BASE_IMAGE) -t local-ai-backend:whisper -f backend/Dockerfile.golang --build-arg BACKEND=whisper .
docker-save-whisper: backend-images
docker save local-ai-backend:whisper -o backend-images/whisper.tar


@@ -278,6 +278,7 @@ func ensureService(ctx context.Context, n *node.Node, nd *NodeData, sserv string
port, err := freeport.GetFreePort()
if err != nil {
zlog.Error().Err(err).Msgf("Could not allocate a free port for %s", nd.ID)
cancel()
return
}
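
Besides the Dockerfile rename, the Go hunk above also adds a cancel() call in ensureService, so the context derived for the service is released when port allocation fails rather than leaking on the early return. Below is a minimal, self-contained sketch of that pattern; the function name startService, its signature, and the placeholder goroutine are assumptions for illustration only — just the freeport.GetFreePort call and the cancel-before-return shape mirror the hunk.

package main

import (
	"context"
	"fmt"
	"log"

	"github.com/phayes/freeport"
)

// startService sketches the pattern from the hunk: a cancellable context is
// derived before a free port is requested, so the early-return path on a
// port-allocation failure must call cancel() to release that context.
// This is illustrative, not the actual LocalAI ensureService code.
func startService(parent context.Context, id string) (context.CancelFunc, error) {
	ctx, cancel := context.WithCancel(parent)

	port, err := freeport.GetFreePort()
	if err != nil {
		// Without this cancel() the derived context (and anything waiting
		// on ctx.Done()) would leak when we bail out early.
		cancel()
		return nil, fmt.Errorf("could not allocate a free port for %s: %w", id, err)
	}

	go func() {
		// Stand-in for the backend service that would bind the allocated port.
		<-ctx.Done()
		_ = port
	}()

	return cancel, nil
}

func main() {
	cancel, err := startService(context.Background(), "example-node")
	if err != nil {
		log.Fatal(err)
	}
	defer cancel()
}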