
Commit b722e7e (parent 6d19a8b)

feat: cleanups, small enhancements

Signed-off-by: mudler <[email protected]>

6 files changed, 35 insertions(+), 15 deletions(-)

api/api.go

+3 -0

@@ -51,6 +51,9 @@ func App(opts ...AppOption) (*fiber.App, error) {
 		}))
 	}

+	log.Info().Msgf("Starting LocalAI using %d threads, with models path: %s", options.threads, options.loader.ModelPath)
+	log.Info().Msgf("LocalAI version: %s", internal.PrintableVersion())
+
 	cm := NewConfigMerger()
 	if err := cm.LoadConfigs(options.loader.ModelPath); err != nil {
 		log.Error().Msgf("error loading config files: %s", err.Error())

api/openai.go

+7 -3

@@ -214,15 +214,17 @@ func completionEndpoint(cm *ConfigMerger, o *Option) func(c *fiber.Ctx) error {

 	if input.Stream {
 		if len(config.PromptStrings) > 1 {
-			return errors.New("cannot handle more than 1 `PromptStrings` when `Stream`ing")
+			return errors.New("cannot handle more than 1 `PromptStrings` when Streaming")
 		}

 		predInput := config.PromptStrings[0]

 		// A model can have a "file.bin.tmpl" file associated with a prompt template prefix
 		templatedInput, err := o.loader.TemplatePrefix(templateFile, struct {
 			Input string
-		}{Input: predInput})
+		}{
+			Input: predInput,
+		})
 		if err == nil {
 			predInput = templatedInput
 			log.Debug().Msgf("Template found, input modified to: %s", predInput)

@@ -268,7 +270,9 @@ func completionEndpoint(cm *ConfigMerger, o *Option) func(c *fiber.Ctx) error {
 			// A model can have a "file.bin.tmpl" file associated with a prompt template prefix
 			templatedInput, err := o.loader.TemplatePrefix(templateFile, struct {
 				Input string
-			}{Input: i})
+			}{
+				Input: i,
+			})
 			if err == nil {
 				i = templatedInput
 				log.Debug().Msgf("Template found, input modified to: %s", i)
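
Functionally this hunk only reworks the error message; the struct literal change is formatting. For context, the data handed to TemplatePrefix exposes a single Input field, so a model's prompt template substitutes it as {{.Input}}. A minimal, self-contained sketch of that substitution, assuming standard text/template semantics (the template text and the prompt are illustrative, not taken from this commit):

package main

import (
	"os"
	"text/template"
)

func main() {
	// Hypothetical prompt template; a real one would live next to the model,
	// e.g. "file.bin.tmpl". The content below is illustrative only.
	const tmpl = "### Instruction:\n{{.Input}}\n\n### Response:\n"
	t := template.Must(template.New("prompt").Parse(tmpl))

	// Same shape as the anonymous struct passed to o.loader.TemplatePrefix above.
	data := struct{ Input string }{Input: "What is LocalAI?"}
	if err := t.Execute(os.Stdout, data); err != nil {
		panic(err)
	}
}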

api/options.go

+16 -0

@@ -3,9 +3,11 @@ package api
 import (
 	"context"
 	"embed"
+	"encoding/json"

 	"github.com/go-skynet/LocalAI/pkg/gallery"
 	model "github.com/go-skynet/LocalAI/pkg/model"
+	"github.com/rs/zerolog/log"
 )

 type Option struct {

@@ -69,6 +71,20 @@ func WithBackendAssets(f embed.FS) AppOption {
 	}
 }

+func WithStringGalleries(galls string) AppOption {
+	return func(o *Option) {
+		if galls == "" {
+			log.Debug().Msgf("no galleries to load")
+			return
+		}
+		var galleries []gallery.Gallery
+		if err := json.Unmarshal([]byte(galls), &galleries); err != nil {
+			log.Error().Msgf("failed loading galleries: %s", err.Error())
+		}
+		o.galleries = append(o.galleries, galleries...)
+	}
+}
+
 func WithGalleries(galleries []gallery.Gallery) AppOption {
 	return func(o *Option) {
 		o.galleries = append(o.galleries, galleries...)
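
WithStringGalleries moves the JSON decoding of the galleries value out of main.go and into the options layer: an empty string is skipped with a debug message, and a malformed one is logged instead of printed. A rough wiring sketch (the JSON keys "name"/"url" and the gallery URL are assumptions about gallery.Gallery's tags, not something this diff shows):

package main

import (
	api "github.com/go-skynet/LocalAI/api"
	model "github.com/go-skynet/LocalAI/pkg/model"
	"github.com/rs/zerolog/log"
)

func main() {
	// Hypothetical gallery definition; field names and URL are illustrative assumptions.
	galleriesJSON := `[{"name":"model-gallery","url":"github:go-skynet/model-gallery/index.yaml"}]`

	app, err := api.App(
		api.WithStringGalleries(galleriesJSON),
		api.WithModelLoader(model.NewModelLoader("./models")),
	)
	if err != nil {
		log.Fatal().Msgf("error starting app: %s", err.Error())
	}
	_ = app // normally followed by app.Listen(...)
}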

internal/version.go

+1 -1

@@ -6,5 +6,5 @@ var Version = ""
 var Commit = ""

 func PrintableVersion() string {
-	return fmt.Sprintf("LocalAI %s (%s)", Version, Commit)
+	return fmt.Sprintf("%s (%s)", Version, Commit)
 }
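
The "LocalAI " prefix is dropped here because the new log line added in api/api.go already supplies it; keeping both would have printed the product name twice. A tiny sketch of the resulting output (version and commit values are placeholders, not real build metadata):

package main

import "fmt"

// Placeholders standing in for the values normally injected at build time.
var Version = "v0.0.0"
var Commit = "b722e7e"

func PrintableVersion() string {
	return fmt.Sprintf("%s (%s)", Version, Commit)
}

func main() {
	// Mirrors the new log line in api/api.go.
	fmt.Printf("LocalAI version: %s\n", PrintableVersion())
	// Prints: LocalAI version: v0.0.0 (b722e7e)
}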

main.go

+3 -11

@@ -1,14 +1,11 @@
 package main

 import (
-	"encoding/json"
-	"fmt"
 	"os"
 	"path/filepath"

 	api "github.com/go-skynet/LocalAI/api"
 	"github.com/go-skynet/LocalAI/internal"
-	"github.com/go-skynet/LocalAI/pkg/gallery"
 	model "github.com/go-skynet/LocalAI/pkg/model"
 	"github.com/rs/zerolog"
 	"github.com/rs/zerolog/log"

@@ -126,19 +123,13 @@ Some of the models compatible are:
 - Alpaca
 - StableLM (ggml quantized)

-It uses llama.cpp, ggml and gpt4all as backend with golang c bindings.
+For a list of compatible model, check out: https://localai.io/model-compatibility/index.html
 `,
 		UsageText: `local-ai [options]`,
-		Copyright: "go-skynet authors",
+		Copyright: "Ettore Di Giacinto",
 		Action: func(ctx *cli.Context) error {
-			fmt.Printf("Starting LocalAI using %d threads, with models path: %s\n", ctx.Int("threads"), ctx.String("models-path"))
-			galls := ctx.String("galleries")
-			var galleries []gallery.Gallery
-			err := json.Unmarshal([]byte(galls), &galleries)
-			fmt.Println(err)
 			app, err := api.App(
 				api.WithConfigFile(ctx.String("config-file")),
-				api.WithGalleries(galleries),
 				api.WithJSONStringPreload(ctx.String("preload-models")),
 				api.WithYAMLConfigPreload(ctx.String("preload-models-config")),
 				api.WithModelLoader(model.NewModelLoader(ctx.String("models-path"))),

@@ -147,6 +138,7 @@ It uses llama.cpp, ggml and gpt4all as backend with golang c bindings.
 				api.WithImageDir(ctx.String("image-path")),
 				api.WithAudioDir(ctx.String("audio-path")),
 				api.WithF16(ctx.Bool("f16")),
+				api.WithStringGalleries(ctx.String("galleries")),
 				api.WithDisableMessage(false),
 				api.WithCors(ctx.Bool("cors")),
 				api.WithCorsAllowOrigins(ctx.String("cors-allow-origins")),

pkg/gallery/gallery.go

+5 -0

@@ -4,6 +4,7 @@ import (
 	"fmt"
 	"os"
 	"path/filepath"
+	"strings"

 	"github.com/go-skynet/LocalAI/pkg/utils"
 	"github.com/imdario/mergo"

@@ -17,6 +18,10 @@ type Gallery struct {

 // Installs a model from the gallery (galleryname@modelname)
 func InstallModelFromGallery(galleries []Gallery, name string, basePath string, req GalleryModel, downloadStatus func(string, string, string, float64)) error {
+
+	// os.PathSeparator is not allowed in model names. Replace them with "__" to avoid conflicts with file paths.
+	name = strings.ReplaceAll(name, string(os.PathSeparator), "__")
+
 	models, err := AvailableGalleryModels(galleries, basePath)
 	if err != nil {
 		return err
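
Since installed models end up as files under the models path, a model name containing a path separator could collide with (or escape into) directory paths; the added line flattens the name before any lookup. A quick illustration of the substitution (the model name is hypothetical):

package main

import (
	"fmt"
	"os"
	"strings"
)

func main() {
	// Hypothetical model reference containing a path separator.
	name := "huggingface/some-model"
	sanitized := strings.ReplaceAll(name, string(os.PathSeparator), "__")
	fmt.Println(sanitized) // on Unix-like systems: huggingface__some-model
}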
