Skip to content
New issue

Have a question about this project? Sign up for a free GitHub account to open an issue and contact its maintainers and the community.

By clicking “Sign up for GitHub”, you agree to our terms of service and privacy statement. We’ll occasionally send you account related emails.

Already on GitHub? Sign in to your account

feat: first pass at improving logging #1956

Merged
merged 1 commit into from
Apr 4, 2024
Merged
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension

Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
5 changes: 5 additions & 0 deletions .vscode/extensions.json
Original file line number Diff line number Diff line change
@@ -0,0 +1,5 @@
{
"recommendations": [
"golang.go"
]
}
2 changes: 1 addition & 1 deletion core/config/application_config.go
Original file line number Diff line number Diff line change
Expand Up @@ -169,7 +169,7 @@ func WithStringGalleries(galls string) AppOption {
}
var galleries []gallery.Gallery
if err := json.Unmarshal([]byte(galls), &galleries); err != nil {
log.Error().Msgf("failed loading galleries: %s", err.Error())
log.Error().Err(err).Msg("failed loading galleries")
}
o.Galleries = append(o.Galleries, galleries...)
}
Expand Down
10 changes: 5 additions & 5 deletions core/http/endpoints/openai/chat.go
Original file line number Diff line number Diff line change
Expand Up @@ -84,7 +84,7 @@ func ChatEndpoint(cl *config.BackendConfigLoader, ml *model.ModelLoader, startup

result, err := handleQuestion(config, req, ml, startupOptions, results[0].arguments, prompt)
if err != nil {
log.Error().Msgf("error handling question: %s", err.Error())
log.Error().Err(err).Msg("error handling question")
return
}

Expand Down Expand Up @@ -268,7 +268,7 @@ func ChatEndpoint(cl *config.BackendConfigLoader, ml *model.ModelLoader, startup
}
templatedChatMessage, err := ml.EvaluateTemplateForChatMessage(config.TemplateConfig.ChatMessage, chatMessageData)
if err != nil {
log.Error().Msgf("error processing message %+v using template \"%s\": %v. Skipping!", chatMessageData, config.TemplateConfig.ChatMessage, err)
log.Error().Err(err).Interface("message", chatMessageData).Str("template", config.TemplateConfig.ChatMessage).Msg("error processing message with template, skipping")
} else {
if templatedChatMessage == "" {
log.Warn().Msgf("template \"%s\" produced blank output for %+v. Skipping!", config.TemplateConfig.ChatMessage, chatMessageData)
Expand Down Expand Up @@ -455,7 +455,7 @@ func ChatEndpoint(cl *config.BackendConfigLoader, ml *model.ModelLoader, startup
case noActionsToRun:
result, err := handleQuestion(config, input, ml, startupOptions, results[0].arguments, predInput)
if err != nil {
log.Error().Msgf("error handling question: %s", err.Error())
log.Error().Err(err).Msg("error handling question")
return
}
*c = append(*c, schema.Choice{
Expand Down Expand Up @@ -565,13 +565,13 @@ func handleQuestion(config *config.BackendConfig, input *schema.OpenAIRequest, m

predFunc, err := backend.ModelInference(input.Context, prompt, images, ml, *config, o, nil)
if err != nil {
log.Error().Msgf("inference error: %s", err.Error())
log.Error().Err(err).Msg("model inference failed")
return "", err
}

prediction, err := predFunc()
if err != nil {
log.Error().Msgf("inference error: %s", err.Error())
log.Error().Err(err).Msg("prediction failed")
return "", err
}
return backend.Finetune(*config, prompt, prediction.Response), nil
Expand Down
5 changes: 3 additions & 2 deletions core/http/endpoints/openai/files_test.go
Original file line number Diff line number Diff line change
Expand Up @@ -3,7 +3,6 @@ package openai
import (
"encoding/json"
"fmt"
"github.com/rs/zerolog/log"
"io"
"mime/multipart"
"net/http"
Expand All @@ -12,6 +11,8 @@ import (
"path/filepath"
"strings"

"github.com/rs/zerolog/log"

"github.com/go-skynet/LocalAI/core/config"

utils2 "github.com/go-skynet/LocalAI/pkg/utils"
Expand Down Expand Up @@ -297,7 +298,7 @@ func responseToListFile(t *testing.T, resp *http.Response) ListFiles {

err := json.NewDecoder(strings.NewReader(responseToString)).Decode(&listFiles)
if err != nil {
log.Error().Msgf("Failed to decode response: %s", err)
log.Error().Err(err).Msg("failed to decode response")
}

return listFiles
Expand Down
10 changes: 5 additions & 5 deletions core/services/backend_monitor.go
Original file line number Diff line number Diff line change
Expand Up @@ -63,34 +63,34 @@ func (bm *BackendMonitor) SampleLocalBackendProcess(model string) (*schema.Backe
pid, err := bm.modelLoader.GetGRPCPID(backend)

if err != nil {
log.Error().Msgf("model %s : failed to find pid %+v", model, err)
log.Error().Err(err).Str("model", model).Msg("failed to find GRPC pid")
return nil, err
}

// Name is slightly frightening but this does _not_ create a new process, rather it looks up an existing process by PID.
backendProcess, err := gopsutil.NewProcess(int32(pid))

if err != nil {
log.Error().Msgf("model %s [PID %d] : error getting process info %+v", model, pid, err)
log.Error().Err(err).Str("model", model).Int("pid", pid).Msg("error getting process info")
return nil, err
}

memInfo, err := backendProcess.MemoryInfo()

if err != nil {
log.Error().Msgf("model %s [PID %d] : error getting memory info %+v", model, pid, err)
log.Error().Err(err).Str("model", model).Int("pid", pid).Msg("error getting memory info")
return nil, err
}

memPercent, err := backendProcess.MemoryPercent()
if err != nil {
log.Error().Msgf("model %s [PID %d] : error getting memory percent %+v", model, pid, err)
log.Error().Err(err).Str("model", model).Int("pid", pid).Msg("error getting memory percent")
return nil, err
}

cpuPercent, err := backendProcess.CPUPercent()
if err != nil {
log.Error().Msgf("model %s [PID %d] : error getting cpu percent %+v", model, pid, err)
log.Error().Err(err).Str("model", model).Int("pid", pid).Msg("error getting cpu percent")
return nil, err
}

Expand Down
2 changes: 1 addition & 1 deletion core/startup/config_file_watcher.go
Original file line number Diff line number Diff line change
Expand Up @@ -85,7 +85,7 @@ func WatchConfigDirectory(configDir string, appConfig *config.ApplicationConfig)
if !ok {
return
}
log.Error().Msgf("WatchConfigDirectory goroutine error: %+v", err)
log.Error().Err(err).Msg("error encountered while watching config directory")
}
}
}()
Expand Down
6 changes: 3 additions & 3 deletions core/startup/startup.go
Original file line number Diff line number Diff line change
Expand Up @@ -61,17 +61,17 @@ func Startup(opts ...config.AppOption) (*config.BackendConfigLoader, *model.Mode
configLoaderOpts := options.ToConfigLoaderOptions()

if err := cl.LoadBackendConfigsFromPath(options.ModelPath, configLoaderOpts...); err != nil {
log.Error().Msgf("error loading config files: %s", err.Error())
log.Error().Err(err).Msg("error loading config files")
}

if options.ConfigFile != "" {
if err := cl.LoadBackendConfigFile(options.ConfigFile, configLoaderOpts...); err != nil {
log.Error().Msgf("error loading config file: %s", err.Error())
log.Error().Err(err).Msg("error loading config file")
}
}

if err := cl.Preload(options.ModelPath); err != nil {
log.Error().Msgf("error downloading models: %s", err.Error())
log.Error().Err(err).Msg("error downloading models")
}

if options.PreloadJSONModels != "" {
Expand Down
10 changes: 5 additions & 5 deletions main.go
Original file line number Diff line number Diff line change
Expand Up @@ -45,7 +45,7 @@ func main() {

path, err := os.Getwd()
if err != nil {
log.Error().Msgf("error: %s", err.Error())
log.Error().Err(err).Msg("failed to get current directory")
os.Exit(1)
}

Expand Down Expand Up @@ -340,7 +340,7 @@ For a list of compatible model, check out: https://localai.io/model-compatibilit

appHTTP, err := http.App(cl, ml, options)
if err != nil {
log.Error().Msg("Error during HTTP App constructor")
log.Error().Err(err).Msg("error during HTTP App construction")
return err
}

Expand All @@ -357,7 +357,7 @@ For a list of compatible model, check out: https://localai.io/model-compatibilit
Action: func(ctx *cli.Context) error {
var galleries []gallery.Gallery
if err := json.Unmarshal([]byte(ctx.String("galleries")), &galleries); err != nil {
log.Error().Msgf("unable to load galleries: %s", err.Error())
log.Error().Err(err).Msg("unable to load galleries")
}

models, err := gallery.AvailableGalleryModels(galleries, ctx.String("models-path"))
Expand All @@ -382,7 +382,7 @@ For a list of compatible model, check out: https://localai.io/model-compatibilit

var galleries []gallery.Gallery
if err := json.Unmarshal([]byte(ctx.String("galleries")), &galleries); err != nil {
log.Error().Msgf("unable to load galleries: %s", err.Error())
log.Error().Err(err).Msg("unable to load galleries")
}

progressBar := progressbar.NewOptions(
Expand Down Expand Up @@ -547,7 +547,7 @@ For a list of compatible model, check out: https://localai.io/model-compatibilit

err = app.Run(os.Args)
if err != nil {
log.Error().Msgf("error: %s", err.Error())
log.Error().Err(err).Msg("application runtime error")
os.Exit(1)
}
}
2 changes: 1 addition & 1 deletion pkg/gallery/models.go
Original file line number Diff line number Diff line change
Expand Up @@ -67,7 +67,7 @@ func GetGalleryConfigFromURL(url string) (Config, error) {
return yaml.Unmarshal(d, &config)
})
if err != nil {
log.Error().Msgf("GetGalleryConfigFromURL error for url %s\n%s", url, err.Error())
log.Error().Err(err).Str("url", url).Msg("failed to get gallery config for url")
return config, err
}
return config, nil
Expand Down
6 changes: 3 additions & 3 deletions pkg/model/initializers.go
Original file line number Diff line number Diff line change
Expand Up @@ -15,8 +15,8 @@ import (
)

var Aliases map[string]string = map[string]string{
"go-llama": LLamaCPP,
"llama": LLamaCPP,
"go-llama": LLamaCPP,
"llama": LLamaCPP,
"embedded-store": LocalStoreBackend,
}

Expand Down Expand Up @@ -127,7 +127,7 @@ func (ml *ModelLoader) grpcModel(backend string, o *Options) func(string, string
break
}
if err != nil && i == o.grpcAttempts-1 {
log.Error().Msgf("Failed starting/connecting to the gRPC service: %s", err.Error())
log.Error().Err(err).Msg("failed starting/connecting to the gRPC service")
}
time.Sleep(time.Duration(o.grpcAttemptsDelay) * time.Second)
}
Expand Down
8 changes: 4 additions & 4 deletions pkg/model/watchdog.go
Original file line number Diff line number Diff line change
Expand Up @@ -110,10 +110,10 @@ func (wd *WatchDog) checkIdle() {
log.Debug().Msgf("[WatchDog] %s: idle connection", address)
if time.Since(t) > wd.idletimeout {
log.Warn().Msgf("[WatchDog] Address %s is idle for too long, killing it", address)
p, ok := wd.addressModelMap[address]
model, ok := wd.addressModelMap[address]
if ok {
if err := wd.pm.ShutdownModel(p); err != nil {
log.Error().Msgf("[watchdog] Error shutting down model %s: %v", p, err)
if err := wd.pm.ShutdownModel(model); err != nil {
log.Error().Err(err).Str("model", model).Msg("[watchdog] error shutting down model")
}
log.Debug().Msgf("[WatchDog] model shut down: %s", address)
delete(wd.idleTime, address)
Expand Down Expand Up @@ -141,7 +141,7 @@ func (wd *WatchDog) checkBusy() {
if ok {
log.Warn().Msgf("[WatchDog] Model %s is busy for too long, killing it", model)
if err := wd.pm.ShutdownModel(model); err != nil {
log.Error().Msgf("[watchdog] Error shutting down model %s: %v", model, err)
log.Error().Err(err).Str("model", model).Msg("[watchdog] error shutting down model")
}
log.Debug().Msgf("[WatchDog] model shut down: %s", address)
delete(wd.timetable, address)
Expand Down
19 changes: 11 additions & 8 deletions pkg/startup/model_preload.go
Original file line number Diff line number Diff line change
Expand Up @@ -35,14 +35,15 @@ func PreloadModelsConfigurations(modelLibraryURL string, modelPath string, model
modelYAML, err := embedded.ResolveContent(url)
// If we resolve something, just save it to disk and continue
if err != nil {
log.Error().Msgf("error loading model: %s", err.Error())
log.Error().Err(err).Msg("error resolving model content")
continue
}

log.Debug().Msgf("[startup] resolved embedded model: %s", url)
md5Name := utils.MD5(url)
if err := os.WriteFile(filepath.Join(modelPath, md5Name)+".yaml", modelYAML, os.ModePerm); err != nil {
log.Error().Msgf("error loading model: %s", err.Error())
modelDefinitionFilePath := filepath.Join(modelPath, md5Name) + ".yaml"
if err := os.WriteFile(modelDefinitionFilePath, modelYAML, os.ModePerm); err != nil {
log.Error().Err(err).Str("filepath", modelDefinitionFilePath).Msg("error writing model definition")
}
case downloader.LooksLikeURL(url):
log.Debug().Msgf("[startup] resolved model to download: %s", url)
Expand All @@ -52,11 +53,12 @@ func PreloadModelsConfigurations(modelLibraryURL string, modelPath string, model

// check if file exists
if _, err := os.Stat(filepath.Join(modelPath, md5Name)); errors.Is(err, os.ErrNotExist) {
err := downloader.DownloadFile(url, filepath.Join(modelPath, md5Name)+".yaml", "", func(fileName, current, total string, percent float64) {
modelDefinitionFilePath := filepath.Join(modelPath, md5Name) + ".yaml"
err := downloader.DownloadFile(url, modelDefinitionFilePath, "", func(fileName, current, total string, percent float64) {
utils.DisplayDownloadFunction(fileName, current, total, percent)
})
if err != nil {
log.Error().Msgf("error loading model: %s", err.Error())
log.Error().Err(err).Str("url", url).Str("filepath", modelDefinitionFilePath).Msg("error downloading model")
}
}
default:
Expand All @@ -67,12 +69,13 @@ func PreloadModelsConfigurations(modelLibraryURL string, modelPath string, model

modelYAML, err := os.ReadFile(url)
if err != nil {
log.Error().Msgf("error loading model: %s", err.Error())
log.Error().Err(err).Str("filepath", url).Msg("error reading model definition")
continue
}

if err := os.WriteFile(filepath.Join(modelPath, md5Name)+".yaml", modelYAML, os.ModePerm); err != nil {
log.Error().Msgf("error loading model: %s", err.Error())
modelDefinitionFilePath := filepath.Join(modelPath, md5Name) + ".yaml"
if err := os.WriteFile(modelDefinitionFilePath, modelYAML, os.ModePerm); err != nil {
log.Error().Err(err).Str("filepath", modelDefinitionFilePath).Msg("error writing model definition")
}
} else {
log.Warn().Msgf("[startup] failed resolving model '%s'", url)
Expand Down
11 changes: 6 additions & 5 deletions pkg/utils/config.go
Original file line number Diff line number Diff line change
Expand Up @@ -2,21 +2,22 @@ package utils

import (
"encoding/json"
"github.com/rs/zerolog/log"
"os"
"path/filepath"

"github.com/rs/zerolog/log"
)

func SaveConfig(filePath, fileName string, obj any) {
file, err := json.MarshalIndent(obj, "", " ")
if err != nil {
log.Error().Msgf("Failed to JSON marshal the uploadedFiles: %s", err)
log.Error().Err(err).Msg("failed to JSON marshal the uploadedFiles")
}

absolutePath := filepath.Join(filePath, fileName)
err = os.WriteFile(absolutePath, file, 0644)
if err != nil {
log.Error().Msgf("Failed to save configuration file to %s: %s", absolutePath, err)
log.Error().Err(err).Str("filepath", absolutePath).Msg("failed to save configuration file")
}
}

Expand All @@ -31,11 +32,11 @@ func LoadConfig(filePath, fileName string, obj interface{}) {

file, err := os.ReadFile(uploadFilePath)
if err != nil {
log.Error().Msgf("Failed to read file: %s", err)
log.Error().Err(err).Str("filepath", uploadFilePath).Msg("failed to read file")
} else {
err = json.Unmarshal(file, &obj)
if err != nil {
log.Error().Msgf("Failed to JSON unmarshal the file %s: %v", uploadFilePath, err)
log.Error().Err(err).Str("filepath", uploadFilePath).Msg("failed to parse file as JSON")
}
}
}
Loading