Mirror of https://github.com/mudler/LocalAI.git (synced 2025-12-19 00:10:29 -06:00)
feat(importers): Add diffuser backend importer with ginkgo tests and UI support (#7316)
* Initial plan
* Add diffuser backend importer with ginkgo tests
* Finalize diffuser backend importer implementation
* Add diffuser preferences to model-editor import section
* Use gopkg.in/yaml.v3 for consistency in diffuser importer

Co-authored-by: copilot-swe-agent[bot] <198982749+Copilot@users.noreply.github.com>
Co-authored-by: mudler <2420543+mudler@users.noreply.github.com>
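For orientation, here is a minimal sketch of how the new importer is exercised, assuming only the types this diff introduces (Details, DiffuserImporter, gallery.ModelConfig); the standalone main package and the sample URI are illustrative, not part of the PR:

```go
package main

import (
    "encoding/json"
    "fmt"

    "github.com/mudler/LocalAI/core/gallery/importers"
)

func main() {
    // The preference keys mirror the fields this PR adds to the model-editor import form.
    prefs := json.RawMessage(`{"backend": "diffusers", "pipeline_type": "StableDiffusion3Pipeline", "cuda": true}`)

    details := importers.Details{
        URI:         "https://huggingface.co/test/my-diffuser-model", // illustrative URI
        Preferences: prefs,
    }

    imp := &importers.DiffuserImporter{}
    if imp.Match(details) {
        cfg, err := imp.Import(details)
        if err != nil {
            panic(err)
        }
        fmt.Println(cfg.Name)       // defaults to the URI basename when no name preference is given
        fmt.Println(cfg.ConfigFile) // YAML rendered from config.ModelConfig via gopkg.in/yaml.v3
    }
}
```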
core/gallery/importers/diffuser.go (new file, 121 lines)
@@ -0,0 +1,121 @@
package importers

import (
    "encoding/json"
    "path/filepath"
    "strings"

    "github.com/mudler/LocalAI/core/config"
    "github.com/mudler/LocalAI/core/gallery"
    "github.com/mudler/LocalAI/core/schema"
    "gopkg.in/yaml.v3"
)

var _ Importer = &DiffuserImporter{}

type DiffuserImporter struct{}

func (i *DiffuserImporter) Match(details Details) bool {
    preferences, err := details.Preferences.MarshalJSON()
    if err != nil {
        return false
    }
    preferencesMap := make(map[string]any)
    err = json.Unmarshal(preferences, &preferencesMap)
    if err != nil {
        return false
    }

    b, ok := preferencesMap["backend"].(string)
    if ok && b == "diffusers" {
        return true
    }

    if details.HuggingFace != nil {
        for _, file := range details.HuggingFace.Files {
            if strings.Contains(file.Path, "model_index.json") ||
                strings.Contains(file.Path, "scheduler/scheduler_config.json") {
                return true
            }
        }
    }

    return false
}

func (i *DiffuserImporter) Import(details Details) (gallery.ModelConfig, error) {
    preferences, err := details.Preferences.MarshalJSON()
    if err != nil {
        return gallery.ModelConfig{}, err
    }
    preferencesMap := make(map[string]any)
    err = json.Unmarshal(preferences, &preferencesMap)
    if err != nil {
        return gallery.ModelConfig{}, err
    }

    name, ok := preferencesMap["name"].(string)
    if !ok {
        name = filepath.Base(details.URI)
    }

    description, ok := preferencesMap["description"].(string)
    if !ok {
        description = "Imported from " + details.URI
    }

    backend := "diffusers"
    b, ok := preferencesMap["backend"].(string)
    if ok {
        backend = b
    }

    pipelineType, ok := preferencesMap["pipeline_type"].(string)
    if !ok {
        pipelineType = "StableDiffusionPipeline"
    }

    schedulerType, ok := preferencesMap["scheduler_type"].(string)
    if !ok {
        schedulerType = ""
    }

    enableParameters, ok := preferencesMap["enable_parameters"].(string)
    if !ok {
        enableParameters = "negative_prompt,num_inference_steps"
    }

    cuda := false
    if cudaVal, ok := preferencesMap["cuda"].(bool); ok {
        cuda = cudaVal
    }

    modelConfig := config.ModelConfig{
        Name:                name,
        Description:         description,
        KnownUsecaseStrings: []string{"image"},
        Backend:             backend,
        PredictionOptions: schema.PredictionOptions{
            BasicModelRequest: schema.BasicModelRequest{
                Model: details.URI,
            },
        },
        Diffusers: config.Diffusers{
            PipelineType:     pipelineType,
            SchedulerType:    schedulerType,
            EnableParameters: enableParameters,
            CUDA:             cuda,
        },
    }

    data, err := yaml.Marshal(modelConfig)
    if err != nil {
        return gallery.ModelConfig{}, err
    }

    return gallery.ModelConfig{
        Name:        name,
        Description: description,
        ConfigFile:  string(data),
    }, nil
}
core/gallery/importers/diffuser_test.go (new file, 246 lines)
@@ -0,0 +1,246 @@
package importers_test

import (
    "encoding/json"

    "github.com/mudler/LocalAI/core/gallery/importers"
    . "github.com/mudler/LocalAI/core/gallery/importers"
    hfapi "github.com/mudler/LocalAI/pkg/huggingface-api"
    . "github.com/onsi/ginkgo/v2"
    . "github.com/onsi/gomega"
)

var _ = Describe("DiffuserImporter", func() {
    var importer *DiffuserImporter

    BeforeEach(func() {
        importer = &DiffuserImporter{}
    })

    Context("Match", func() {
        It("should match when backend preference is diffusers", func() {
            preferences := json.RawMessage(`{"backend": "diffusers"}`)
            details := Details{
                URI:         "https://example.com/model",
                Preferences: preferences,
            }

            result := importer.Match(details)
            Expect(result).To(BeTrue())
        })

        It("should match when HuggingFace details contain model_index.json", func() {
            hfDetails := &hfapi.ModelDetails{
                Files: []hfapi.ModelFile{
                    {Path: "model_index.json"},
                },
            }
            details := Details{
                URI:         "https://huggingface.co/test/model",
                HuggingFace: hfDetails,
            }

            result := importer.Match(details)
            Expect(result).To(BeTrue())
        })

        It("should match when HuggingFace details contain scheduler config", func() {
            hfDetails := &hfapi.ModelDetails{
                Files: []hfapi.ModelFile{
                    {Path: "scheduler/scheduler_config.json"},
                },
            }
            details := Details{
                URI:         "https://huggingface.co/test/model",
                HuggingFace: hfDetails,
            }

            result := importer.Match(details)
            Expect(result).To(BeTrue())
        })

        It("should not match when URI has no diffuser files and no backend preference", func() {
            details := Details{
                URI: "https://example.com/model.bin",
            }

            result := importer.Match(details)
            Expect(result).To(BeFalse())
        })

        It("should not match when backend preference is different", func() {
            preferences := json.RawMessage(`{"backend": "llama-cpp"}`)
            details := Details{
                URI:         "https://example.com/model",
                Preferences: preferences,
            }

            result := importer.Match(details)
            Expect(result).To(BeFalse())
        })

        It("should return false when JSON preferences are invalid", func() {
            preferences := json.RawMessage(`invalid json`)
            details := Details{
                URI:         "https://example.com/model",
                Preferences: preferences,
            }

            result := importer.Match(details)
            Expect(result).To(BeFalse())
        })
    })

    Context("Import", func() {
        It("should import model config with default name and description", func() {
            details := Details{
                URI: "https://huggingface.co/test/my-diffuser-model",
            }

            modelConfig, err := importer.Import(details)

            Expect(err).ToNot(HaveOccurred())
            Expect(modelConfig.Name).To(Equal("my-diffuser-model"))
            Expect(modelConfig.Description).To(Equal("Imported from https://huggingface.co/test/my-diffuser-model"))
            Expect(modelConfig.ConfigFile).To(ContainSubstring("backend: diffusers"))
            Expect(modelConfig.ConfigFile).To(ContainSubstring("model: https://huggingface.co/test/my-diffuser-model"))
            Expect(modelConfig.ConfigFile).To(ContainSubstring("pipeline_type: StableDiffusionPipeline"))
            Expect(modelConfig.ConfigFile).To(ContainSubstring("enable_parameters: negative_prompt,num_inference_steps"))
        })

        It("should import model config with custom name and description from preferences", func() {
            preferences := json.RawMessage(`{"name": "custom-diffuser", "description": "Custom diffuser model"}`)
            details := Details{
                URI:         "https://huggingface.co/test/my-model",
                Preferences: preferences,
            }

            modelConfig, err := importer.Import(details)

            Expect(err).ToNot(HaveOccurred())
            Expect(modelConfig.Name).To(Equal("custom-diffuser"))
            Expect(modelConfig.Description).To(Equal("Custom diffuser model"))
        })

        It("should use custom pipeline_type from preferences", func() {
            preferences := json.RawMessage(`{"pipeline_type": "StableDiffusion3Pipeline"}`)
            details := Details{
                URI:         "https://huggingface.co/test/my-model",
                Preferences: preferences,
            }

            modelConfig, err := importer.Import(details)

            Expect(err).ToNot(HaveOccurred())
            Expect(modelConfig.ConfigFile).To(ContainSubstring("pipeline_type: StableDiffusion3Pipeline"))
        })

        It("should use default pipeline_type when not specified", func() {
            details := Details{
                URI: "https://huggingface.co/test/my-model",
            }

            modelConfig, err := importer.Import(details)

            Expect(err).ToNot(HaveOccurred())
            Expect(modelConfig.ConfigFile).To(ContainSubstring("pipeline_type: StableDiffusionPipeline"))
        })

        It("should use custom scheduler_type from preferences", func() {
            preferences := json.RawMessage(`{"scheduler_type": "k_dpmpp_2m"}`)
            details := Details{
                URI:         "https://huggingface.co/test/my-model",
                Preferences: preferences,
            }

            modelConfig, err := importer.Import(details)

            Expect(err).ToNot(HaveOccurred())
            Expect(modelConfig.ConfigFile).To(ContainSubstring("scheduler_type: k_dpmpp_2m"))
        })

        It("should use cuda setting from preferences", func() {
            preferences := json.RawMessage(`{"cuda": true}`)
            details := Details{
                URI:         "https://huggingface.co/test/my-model",
                Preferences: preferences,
            }

            modelConfig, err := importer.Import(details)

            Expect(err).ToNot(HaveOccurred())
            Expect(modelConfig.ConfigFile).To(ContainSubstring("cuda: true"))
        })

        It("should use custom enable_parameters from preferences", func() {
            preferences := json.RawMessage(`{"enable_parameters": "num_inference_steps,guidance_scale"}`)
            details := Details{
                URI:         "https://huggingface.co/test/my-model",
                Preferences: preferences,
            }

            modelConfig, err := importer.Import(details)

            Expect(err).ToNot(HaveOccurred())
            Expect(modelConfig.ConfigFile).To(ContainSubstring("enable_parameters: num_inference_steps,guidance_scale"))
        })

        It("should use custom backend from preferences", func() {
            preferences := json.RawMessage(`{"backend": "diffusers"}`)
            details := Details{
                URI:         "https://huggingface.co/test/my-model",
                Preferences: preferences,
            }

            modelConfig, err := importer.Import(details)

            Expect(err).ToNot(HaveOccurred())
            Expect(modelConfig.ConfigFile).To(ContainSubstring("backend: diffusers"))
        })

        It("should handle invalid JSON preferences", func() {
            preferences := json.RawMessage(`invalid json`)
            details := Details{
                URI:         "https://huggingface.co/test/my-model",
                Preferences: preferences,
            }

            _, err := importer.Import(details)
            Expect(err).To(HaveOccurred())
        })

        It("should extract filename correctly from URI with path", func() {
            details := importers.Details{
                URI: "https://huggingface.co/test/path/to/model",
            }

            modelConfig, err := importer.Import(details)

            Expect(err).ToNot(HaveOccurred())
            Expect(modelConfig.Name).To(Equal("model"))
        })

        It("should include known_usecases as image in config", func() {
            details := Details{
                URI: "https://huggingface.co/test/my-model",
            }

            modelConfig, err := importer.Import(details)

            Expect(err).ToNot(HaveOccurred())
            Expect(modelConfig.ConfigFile).To(ContainSubstring("known_usecases:"))
            Expect(modelConfig.ConfigFile).To(ContainSubstring("- image"))
        })

        It("should include diffusers configuration in config", func() {
            details := Details{
                URI: "https://huggingface.co/test/my-model",
            }

            modelConfig, err := importer.Import(details)

            Expect(err).ToNot(HaveOccurred())
            Expect(modelConfig.ConfigFile).To(ContainSubstring("diffusers:"))
        })
    })
})
@@ -20,6 +20,7 @@ var defaultImporters = []Importer{
    &MLXImporter{},
    &VLLMImporter{},
    &TransformersImporter{},
    &DiffuserImporter{},
}

type Details struct {
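defaultImporters appears to be the ordered registry consulted when a model is imported, with the new DiffuserImporter appended after the existing importers. A hypothetical selection loop (pickImporter is illustrative only, not part of this PR or LocalAI's actual API) shows how such a registry is typically used:

```go
package importers

// pickImporter is a hypothetical helper: it walks an ordered registry such as
// defaultImporters and returns the first importer whose Match accepts the details.
func pickImporter(registry []Importer, details Details) (Importer, bool) {
    for _, imp := range registry {
        if imp.Match(details) {
            return imp, true
        }
    }
    return nil, false
}
```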
@@ -299,6 +299,7 @@
                <option value="mlx-vlm">mlx-vlm</option>
                <option value="transformers">transformers</option>
                <option value="vllm">vllm</option>
                <option value="diffusers">diffusers</option>
            </select>
            <p class="mt-1 text-xs text-gray-400">
                Force a specific backend. Leave empty to auto-detect from URI.
@@ -401,6 +402,71 @@
                    Model type for transformers backend. Examples: AutoModelForCausalLM, SentenceTransformer, Mamba, MusicgenForConditionalGeneration. Leave empty to use default (AutoModelForCausalLM).
                </p>
            </div>

            <!-- Pipeline Type (Diffusers) -->
            <div x-show="commonPreferences.backend === 'diffusers'">
                <label class="block text-sm font-medium text-gray-300 mb-2">
                    <i class="fas fa-stream mr-2"></i>Pipeline Type
                </label>
                <input
                    x-model="commonPreferences.pipeline_type"
                    type="text"
                    placeholder="StableDiffusionPipeline (for diffusers backend)"
                    class="w-full px-4 py-2 bg-gray-900/90 border border-gray-700/70 rounded-lg text-gray-200 focus:border-green-500 focus:ring-2 focus:ring-green-500/50 focus:outline-none transition-all"
                    :disabled="isSubmitting">
                <p class="mt-1 text-xs text-gray-400">
                    Pipeline type for diffusers backend. Examples: StableDiffusionPipeline, StableDiffusion3Pipeline, FluxPipeline. Leave empty to use default (StableDiffusionPipeline).
                </p>
            </div>

            <!-- Scheduler Type (Diffusers) -->
            <div x-show="commonPreferences.backend === 'diffusers'">
                <label class="block text-sm font-medium text-gray-300 mb-2">
                    <i class="fas fa-clock mr-2"></i>Scheduler Type
                </label>
                <input
                    x-model="commonPreferences.scheduler_type"
                    type="text"
                    placeholder="k_dpmpp_2m (optional)"
                    class="w-full px-4 py-2 bg-gray-900/90 border border-gray-700/70 rounded-lg text-gray-200 focus:border-green-500 focus:ring-2 focus:ring-green-500/50 focus:outline-none transition-all"
                    :disabled="isSubmitting">
                <p class="mt-1 text-xs text-gray-400">
                    Scheduler type for diffusers backend. Examples: k_dpmpp_2m, euler_a, ddim. Leave empty to use model default.
                </p>
            </div>

            <!-- Enable Parameters (Diffusers) -->
            <div x-show="commonPreferences.backend === 'diffusers'">
                <label class="block text-sm font-medium text-gray-300 mb-2">
                    <i class="fas fa-cogs mr-2"></i>Enable Parameters
                </label>
                <input
                    x-model="commonPreferences.enable_parameters"
                    type="text"
                    placeholder="negative_prompt,num_inference_steps (comma-separated)"
                    class="w-full px-4 py-2 bg-gray-900/90 border border-gray-700/70 rounded-lg text-gray-200 focus:border-green-500 focus:ring-2 focus:ring-green-500/50 focus:outline-none transition-all"
                    :disabled="isSubmitting">
                <p class="mt-1 text-xs text-gray-400">
                    Enabled parameters for diffusers backend (comma-separated). Leave empty to use default (negative_prompt,num_inference_steps).
                </p>
            </div>

            <!-- CUDA (Diffusers) -->
            <div x-show="commonPreferences.backend === 'diffusers'">
                <label class="flex items-center cursor-pointer">
                    <input
                        x-model="commonPreferences.cuda"
                        type="checkbox"
                        class="w-5 h-5 rounded bg-gray-900/90 border-gray-700/70 text-green-500 focus:ring-2 focus:ring-green-500/50 focus:outline-none transition-all cursor-pointer"
                        :disabled="isSubmitting">
                    <span class="ml-3 text-sm font-medium text-gray-300">
                        <i class="fas fa-microchip mr-2"></i>CUDA
                    </span>
                </label>
                <p class="mt-1 ml-8 text-xs text-gray-400">
                    Enable CUDA support for GPU acceleration with diffusers backend.
                </p>
            </div>
        </div>

        <!-- Custom Preferences -->
@@ -658,7 +724,11 @@ function importModel() {
        quantizations: '',
        mmproj_quantizations: '',
        embeddings: false,
        type: ''
        type: '',
        pipeline_type: '',
        scheduler_type: '',
        enable_parameters: '',
        cuda: false
    },
    isSubmitting: false,
    currentJobId: null,
@@ -733,6 +803,18 @@ function importModel() {
        if (this.commonPreferences.type && this.commonPreferences.type.trim()) {
            prefsObj.type = this.commonPreferences.type.trim();
        }
        if (this.commonPreferences.pipeline_type && this.commonPreferences.pipeline_type.trim()) {
            prefsObj.pipeline_type = this.commonPreferences.pipeline_type.trim();
        }
        if (this.commonPreferences.scheduler_type && this.commonPreferences.scheduler_type.trim()) {
            prefsObj.scheduler_type = this.commonPreferences.scheduler_type.trim();
        }
        if (this.commonPreferences.enable_parameters && this.commonPreferences.enable_parameters.trim()) {
            prefsObj.enable_parameters = this.commonPreferences.enable_parameters.trim();
        }
        if (this.commonPreferences.cuda) {
            prefsObj.cuda = true;
        }

        // Add custom preferences (can override common ones)
        this.preferences.forEach(pref => {
@@ -1,8 +1,3 @@
module github.com/mudler/LocalAI/docs

go 1.19

require (
    github.com/McShelby/hugo-theme-relearn v0.0.0-20251117214752-f69a085322cc // indirect
    github.com/gohugoio/hugo-mod-bootstrap-scss/v5 v5.20300.20400 // indirect
)
@@ -1,6 +0,0 @@
github.com/McShelby/hugo-theme-relearn v0.0.0-20251117214752-f69a085322cc h1:8BvuabGtqXqhT4H01SS7s0zXea0B2R5ZOFEcPugMbNg=
github.com/McShelby/hugo-theme-relearn v0.0.0-20251117214752-f69a085322cc/go.mod h1:mKQQdxZNIlLvAj8X3tMq+RzntIJSr9z7XdzuMomt0IM=
github.com/gohugoio/hugo-mod-bootstrap-scss/v5 v5.20300.20400 h1:L6+F22i76xmeWWwrtijAhUbf3BiRLmpO5j34bgl1ggU=
github.com/gohugoio/hugo-mod-bootstrap-scss/v5 v5.20300.20400/go.mod h1:uekq1D4ebeXgduLj8VIZy8TgfTjrLdSl6nPtVczso78=
github.com/gohugoio/hugo-mod-jslibs-dist/popperjs/v2 v2.21100.20000/go.mod h1:mFberT6ZtcchrsDtfvJM7aAH2bDKLdOnruUHl0hlapI=
github.com/twbs/bootstrap v5.3.3+incompatible/go.mod h1:fZTSrkpSf0/HkL0IIJzvVspTt1r9zuf7XlZau8kpcY0=
go.mod (2 lines changed)
@@ -54,6 +54,7 @@ require (
    go.opentelemetry.io/otel/metric v1.38.0
    go.opentelemetry.io/otel/sdk/metric v1.38.0
    google.golang.org/grpc v1.76.0
    google.golang.org/protobuf v1.36.10
    gopkg.in/yaml.v2 v2.4.0
    gopkg.in/yaml.v3 v3.0.1
    oras.land/oras-go/v2 v2.6.0
@@ -65,7 +66,6 @@ require (
    github.com/stretchr/testify v1.11.1 // indirect
    github.com/swaggo/files/v2 v2.0.2 // indirect
    github.com/valyala/fasttemplate v1.2.2 // indirect
    google.golang.org/protobuf v1.36.10 // indirect
)

require (