# refactor: Replace logging package with corelog (#2406)
## Relevant issue(s)

Resolves #2405

## Description

This PR replaces the `logging` package with the `corelog` library.

Please review along with sourcenetwork/corelog#2.

`corelog` uses the `log/slog` standard library package to produce structured logs.

The interface is kept mostly the same, with some modifications:

- The `Debug` and `Fatal` levels have been removed.
- Passing a `context` to logging calls has moved to context-specific variants; for example, `Info` has an `InfoContext` variant (see the sketch after this list).
- The `Feedback` methods have been removed in favor of restricting output to `stderr` and `stdout`.
- Supported output formats are `text` and `json`; `csv` is not supported by `slog`.
- Support for colored logs is not yet implemented, but can easily be added in a future PR.
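
For illustration, a minimal sketch of the new call style, based on the call sites updated in this PR (the logger name, messages, and fields below are examples only):

```go
package main

import (
	"context"
	"errors"

	"github.com/sourcenetwork/corelog"
)

// Loggers are created with corelog.NewLogger rather than logging.MustNewLogger.
var log = corelog.NewLogger("example")

func main() {
	ctx := context.Background()

	// Plain variants log without a context.
	log.Info("Closing DefraDB process...")

	// Context-aware variants take the context as the first argument.
	log.InfoContext(ctx, "Starting DefraDB")

	// The *E variants attach an error; structured fields use attribute
	// helpers such as corelog.Any.
	log.ErrorContextE(ctx, "Failed to close the request plan", errors.New("example error"))
	log.InfoContext(ctx, "dumping entry", corelog.Any("key", "value"))
}
```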

## Tasks

- [x] I made sure the code is well commented, particularly
hard-to-understand areas.
- [x] I made sure the repository-held documentation is changed
accordingly.
- [x] I made sure the pull request title adheres to the conventional
commit style (the subset used in the project can be found in
[tools/configs/chglog/config.yml](tools/configs/chglog/config.yml)).
- [x] I made sure to discuss its limitations such as threats to
validity, vulnerability to mistake and misuse, robustness to
invalidation of assumptions, resource requirements, ...

## How has this been tested?

Manually tested

Specify the platform(s) on which this was tested:
- macOS
nasdf authored Mar 19, 2024
1 parent c74a6f7 commit d1b1a76
Showing 42 changed files with 246 additions and 2,079 deletions.
4 changes: 2 additions & 2 deletions cli/cli.go
@@ -16,10 +16,10 @@ package cli
import (
"github.com/spf13/cobra"

"github.com/sourcenetwork/defradb/logging"
"github.com/sourcenetwork/corelog"
)

var log = logging.MustNewLogger("cli")
var log = corelog.NewLogger("cli")

// NewDefraCommand returns the root command instanciated with its tree of subcommands.
func NewDefraCommand() *cobra.Command {
67 changes: 17 additions & 50 deletions cli/config.go
@@ -15,10 +15,9 @@ import (
"path/filepath"
"strings"

"github.com/sourcenetwork/corelog"
"github.com/spf13/pflag"
"github.com/spf13/viper"

"github.com/sourcenetwork/defradb/logging"
)

const (
@@ -41,11 +40,12 @@ var configPaths = []string{

// configFlags is a mapping of config keys to cli flags to bind to.
var configFlags = map[string]string{
"log.level": "loglevel",
"log.output": "logoutput",
"log.format": "logformat",
"log.stacktrace": "logtrace",
"log.nocolor": "lognocolor",
"log.level": "log-level",
"log.output": "log-output",
"log.format": "log-format",
"log.stacktrace": "log-stacktrace",
"log.source": "log-source",
"log.overrides": "log-overrides",
"api.address": "url",
"datastore.maxtxnretries": "max-txn-retries",
"datastore.store": "store",
@@ -125,14 +125,17 @@ func loadConfig(rootdir string, flags *pflag.FlagSet) (*viper.Viper, error) {
}
}

logCfg := loggingConfig(cfg.Sub("log"))
logCfg.OverridesByLoggerName = make(map[string]logging.Config)
// set default logging config
corelog.SetConfig(corelog.Config{
Level: cfg.GetString("log.level"),
Format: cfg.GetString("log.format"),
Output: cfg.GetString("log.output"),
EnableStackTrace: cfg.GetBool("log.stacktrace"),
EnableSource: cfg.GetBool("log.source"),
})

// apply named logging overrides
for key := range cfg.GetStringMap("log.overrides") {
logCfg.OverridesByLoggerName[key] = loggingConfig(cfg.Sub("log.overrides." + key))
}
logging.SetConfig(logCfg)
// set logging config overrides
corelog.SetConfigOverrides(cfg.GetString("log.overrides"))

return cfg, nil
}
@@ -147,39 +150,3 @@ func bindConfigFlags(cfg *viper.Viper, flags *pflag.FlagSet) error {
}
return nil
}

// loggingConfig returns a new logging config from the given config.
func loggingConfig(cfg *viper.Viper) logging.Config {
var level int8
switch value := cfg.GetString("level"); value {
case configLogLevelDebug:
level = logging.Debug
case configLogLevelInfo:
level = logging.Info
case configLogLevelError:
level = logging.Error
case configLogLevelFatal:
level = logging.Fatal
default:
level = logging.Info
}

var format logging.EncoderFormat
switch value := cfg.GetString("format"); value {
case configLogFormatJSON:
format = logging.JSON
case configLogFormatCSV:
format = logging.CSV
default:
format = logging.CSV
}

return logging.Config{
Level: logging.NewLogLevelOption(level),
EnableStackTrace: logging.NewEnableStackTraceOption(cfg.GetBool("stacktrace")),
DisableColor: logging.NewDisableColorOption(cfg.GetBool("nocolor")),
EncoderFormat: logging.NewEncoderFormatOption(format),
OutputPaths: []string{cfg.GetString("output")},
EnableCaller: logging.NewEnableCallerOption(cfg.GetBool("caller")),
}
}
8 changes: 4 additions & 4 deletions cli/config_test.go
@@ -53,9 +53,9 @@ func TestLoadConfigNotExist(t *testing.T) {
assert.Equal(t, []string{}, cfg.GetStringSlice("net.peers"))

assert.Equal(t, "info", cfg.GetString("log.level"))
assert.Equal(t, false, cfg.GetBool("log.stacktrace"))
assert.Equal(t, "csv", cfg.GetString("log.format"))
assert.Equal(t, "stderr", cfg.GetString("log.output"))
assert.Equal(t, false, cfg.GetBool("log.nocolor"))
assert.Equal(t, false, cfg.GetBool("log.caller"))
assert.Equal(t, "text", cfg.GetString("log.format"))
assert.Equal(t, false, cfg.GetBool("log.stacktrace"))
assert.Equal(t, false, cfg.GetBool("log.source"))
assert.Equal(t, "", cfg.GetString("log.overrides"))
}
24 changes: 15 additions & 9 deletions cli/root.go
@@ -38,33 +38,39 @@ Start a DefraDB node, interact with a local or remote node, and much more.
)

cmd.PersistentFlags().String(
"loglevel",
"log-level",
"info",
"Log level to use. Options are debug, info, error, fatal",
)

cmd.PersistentFlags().String(
"logoutput",
"log-output",
"stderr",
"Log output path",
"Log output path. Options are stderr or stdout.",
)

cmd.PersistentFlags().String(
"logformat",
"csv",
"Log format to use. Options are csv, json",
"log-format",
"text",
"Log format to use. Options are text or json",
)

cmd.PersistentFlags().Bool(
"logtrace",
"log-stacktrace",
false,
"Include stacktrace in error and fatal logs",
)

cmd.PersistentFlags().Bool(
"lognocolor",
"log-source",
false,
"Disable colored log output",
"Include source location in logs",
)

cmd.PersistentFlags().String(
"log-overrides",
"",
"Logger config overrides. Format <name>,<key>=<val>,...;<name>,...",
)

cmd.PersistentFlags().String(
2 changes: 1 addition & 1 deletion cli/server_dump.go
@@ -24,7 +24,7 @@ func MakeServerDumpCmd() *cobra.Command {
Short: "Dumps the state of the entire database",
RunE: func(cmd *cobra.Command, _ []string) error {
cfg := mustGetContextConfig(cmd)
log.FeedbackInfo(cmd.Context(), "Dumping DB state...")
log.InfoContext(cmd.Context(), "Dumping DB state...")

if cfg.GetString("datastore.store") != configStoreBadger {
return errors.New("server-side dump is only supported for the Badger datastore")
8 changes: 4 additions & 4 deletions cli/start.go
@@ -108,11 +108,11 @@ func MakeStartCommand() *cobra.Command {

defer func() {
if err := n.Close(cmd.Context()); err != nil {
log.FeedbackErrorE(cmd.Context(), "Stopping DefraDB", err)
log.ErrorContextE(cmd.Context(), "Stopping DefraDB", err)
}
}()

log.FeedbackInfo(cmd.Context(), "Starting DefraDB")
log.InfoContext(cmd.Context(), "Starting DefraDB")
if err := n.Start(cmd.Context()); err != nil {
return err
}
@@ -122,9 +122,9 @@ func MakeStartCommand() *cobra.Command {

select {
case <-cmd.Context().Done():
log.FeedbackInfo(cmd.Context(), "Received context cancellation; shutting down...")
log.InfoContext(cmd.Context(), "Received context cancellation; shutting down...")
case <-signalCh:
log.FeedbackInfo(cmd.Context(), "Received interrupt; shutting down...")
log.InfoContext(cmd.Context(), "Received interrupt; shutting down...")
}

return nil
5 changes: 2 additions & 3 deletions datastore/blockstore.go
@@ -64,7 +64,6 @@ func (bs *bstore) HashOnRead(enabled bool) {
// Get returns a block from the blockstore.
func (bs *bstore) Get(ctx context.Context, k cid.Cid) (blocks.Block, error) {
if !k.Defined() {
log.Error(ctx, "Undefined CID in blockstore")
return nil, ipld.ErrNotFound{Cid: k}
}
bdata, err := bs.store.Get(ctx, dshelp.MultihashToDsKey(k.Hash()))
@@ -164,13 +163,13 @@ func (bs *bstore) AllKeysChan(ctx context.Context) (<-chan cid.Cid, error) {
return
}
if e.Error != nil {
log.ErrorE(ctx, "Blockstore.AllKeysChan errored", e.Error)
log.ErrorContextE(ctx, "Blockstore.AllKeysChan errored", e.Error)
return
}

hash, err := dshelp.DsKeyToMultihash(ds.RawKey(e.Key))
if err != nil {
log.ErrorE(ctx, "Error parsing key from binary", err)
log.ErrorContextE(ctx, "Error parsing key from binary", err)
continue
}
k := cid.NewCidV1(cid.Raw, hash)
5 changes: 3 additions & 2 deletions datastore/store.go
@@ -14,12 +14,13 @@ import (
blockstore "github.com/ipfs/boxo/blockstore"
ds "github.com/ipfs/go-datastore"

"github.com/sourcenetwork/corelog"

"github.com/sourcenetwork/defradb/datastore/iterable"
"github.com/sourcenetwork/defradb/logging"
)

var (
log = logging.MustNewLogger("store")
log = corelog.NewLogger("store")
)

// RootStore wraps Batching and TxnDatastore requiring datastore to support both batching and transactions.
2 changes: 1 addition & 1 deletion db/collection.go
@@ -1151,7 +1151,7 @@ func (c *collection) getAllDocIDsChan(
go func() {
defer func() {
if err := q.Close(); err != nil {
log.ErrorE(ctx, errFailedtoCloseQueryReqAllIDs, err)
log.ErrorContextE(ctx, errFailedtoCloseQueryReqAllIDs, err)
}
close(resCh)
c.discardImplicitTxn(ctx, txn)
2 changes: 1 addition & 1 deletion db/collection_delete.go
@@ -179,7 +179,7 @@ func (c *collection) deleteWithFilter(
// If the plan isn't properly closed at any exit point log the error.
defer func() {
if err := selectionPlan.Close(); err != nil {
log.ErrorE(ctx, "Failed to close the request plan, after filter delete", err)
log.ErrorContextE(ctx, "Failed to close the request plan, after filter delete", err)
}
}()

2 changes: 1 addition & 1 deletion db/collection_update.go
@@ -240,7 +240,7 @@ func (c *collection) updateWithFilter(
// If the plan isn't properly closed at any exit point log the error.
defer func() {
if err := selectionPlan.Close(); err != nil {
log.ErrorE(ctx, "Failed to close the selection plan, after filter update", err)
log.ErrorContextE(ctx, "Failed to close the selection plan, after filter update", err)
}
}()

17 changes: 6 additions & 11 deletions db/db.go
@@ -22,6 +22,7 @@ import (
blockstore "github.com/ipfs/boxo/blockstore"
ds "github.com/ipfs/go-datastore"
dsq "github.com/ipfs/go-datastore/query"
"github.com/sourcenetwork/corelog"
"github.com/sourcenetwork/immutable"

"github.com/sourcenetwork/defradb/client"
@@ -30,12 +31,11 @@ import (
"github.com/sourcenetwork/defradb/errors"
"github.com/sourcenetwork/defradb/events"
"github.com/sourcenetwork/defradb/lens"
"github.com/sourcenetwork/defradb/logging"
"github.com/sourcenetwork/defradb/request/graphql"
)

var (
log = logging.MustNewLogger("db")
log = corelog.NewLogger("db")
)

// make sure we match our client interface
@@ -109,7 +109,6 @@ func NewDB(ctx context.Context, rootstore datastore.RootStore, options ...Option
}

func newDB(ctx context.Context, rootstore datastore.RootStore, options ...Option) (*implicitTxnDB, error) {
log.Debug(ctx, "Loading: internal datastores")
multistore := datastore.MultiStoreFrom(rootstore)

parser, err := graphql.NewParser()
@@ -197,15 +196,13 @@ func (db *db) initialize(ctx context.Context) error {
}
defer txn.Discard(ctx)

log.Debug(ctx, "Checking if DB has already been initialized...")
exists, err := txn.Systemstore().Has(ctx, ds.NewKey("init"))
if err != nil && !errors.Is(err, ds.ErrNotFound) {
return err
}
// if we're loading an existing database, just load the schema
// and migrations and finish initialization
if exists {
log.Debug(ctx, "DB has already been initialized, continuing")
err = db.loadSchema(ctx, txn)
if err != nil {
return err
@@ -222,8 +219,6 @@ func (db *db) initialize(ctx context.Context) error {
return txn.Commit(ctx)
}

log.Debug(ctx, "Opened a new DB, needs full initialization")

// init meta data
// collection sequence
_, err = db.getSequence(ctx, txn, core.CollectionIDSequenceKey{})
@@ -261,16 +256,16 @@ func (db *db) PrintDump(ctx context.Context) error {
// Close is called when we are shutting down the database.
// This is the place for any last minute cleanup or releasing of resources (i.e.: Badger instance).
func (db *db) Close() {
log.Info(context.Background(), "Closing DefraDB process...")
log.Info("Closing DefraDB process...")
if db.events.Updates.HasValue() {
db.events.Updates.Value().Close()
}

err := db.rootstore.Close()
if err != nil {
log.ErrorE(context.Background(), "Failure closing running process", err)
log.ErrorE("Failure closing running process", err)
}
log.Info(context.Background(), "Successfully closed running process")
log.Info("Successfully closed running process")
}

func printStore(ctx context.Context, store datastore.DSReaderWriter) error {
@@ -286,7 +281,7 @@ func printStore(ctx context.Context, store datastore.DSReaderWriter) error {
}

for r := range results.Next() {
log.Info(ctx, "", logging.NewKV(r.Key, r.Value))
log.InfoContext(ctx, "", corelog.Any(r.Key, r.Value))
}

return results.Close()
2 changes: 1 addition & 1 deletion db/subscriptions.go
@@ -55,7 +55,7 @@ func (db *db) handleSubscription(
for evt := range pub.Event() {
txn, err := db.NewTxn(ctx, false)
if err != nil {
log.Error(ctx, err.Error())
log.ErrorContext(ctx, err.Error())
continue
}

26 changes: 25 additions & 1 deletion docs/config.md
@@ -63,4 +63,28 @@ https://docs.libp2p.io/concepts/addressing/

Enable libp2p's Circuit relay transport protocol. Defaults to `false`.

https://docs.libp2p.io/concepts/circuit-relay/
https://docs.libp2p.io/concepts/circuit-relay/

## `log.level`

Log level to use. Options are `info` or `error`. Defaults to `info`.

## `log.output`

Log output path. Options are `stderr` or `stdout`. Defaults to `stderr`.

## `log.format`

Log format to use. Options are `text` or `json`. Defaults to `text`.

## `log.stacktrace`

Include stacktrace in error and fatal logs. Defaults to `false`.

## `log.source`

Include source location in logs. Defaults to `false`.

## `log.overrides`

Logger config overrides. Format `<name>,<key>=<val>,...;<name>,...`.
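
For reference, a hedged Go sketch of how these settings are applied via `corelog` in this PR's `loadConfig` changes (the values shown are the documented defaults, and the override string below uses hypothetical logger names and keys for illustration):

```go
package main

import "github.com/sourcenetwork/corelog"

func main() {
	// Mirrors the configuration calls added in cli/config.go,
	// using the documented default values.
	corelog.SetConfig(corelog.Config{
		Level:            "info",
		Format:           "text",
		Output:           "stderr",
		EnableStackTrace: false,
		EnableSource:     false,
	})

	// Named overrides follow the documented
	// "<name>,<key>=<val>,...;<name>,..." format; "net" and "http"
	// are hypothetical logger names, not confirmed by this PR.
	corelog.SetConfigOverrides("net,level=error;http,format=json")
}
```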