
Commit

small refactor / moving stuff / cleaner output
edznux-dd committed Oct 10, 2024
1 parent 84b4899 commit d2f62a3
Showing 5 changed files with 96 additions and 289 deletions.
2 changes: 1 addition & 1 deletion pkg/collector/k8s_api.go
@@ -83,7 +83,7 @@ func NewK8sAPICollector(ctx context.Context, cfg *config.KubehoundConfig) (Colle
// log.WithCollectedCluster(clusterName),

if !cfg.Collector.NonInteractive {
l.Warnf("About to dump k8s cluster: %q - Do you want to continue ? [Yes/No]", clusterName)
l.Warn("About to dump k8s cluster - Do you want to continue ? [Yes/No]", log.String("cluster", clusterName))
proceed, err := cmd.AskForConfirmation(ctx)
if err != nil {
return nil, err
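
For reference, a minimal sketch of the intent of this change (assuming the telemetry package is imported as log, as in the hunk above): the cluster name moves out of the format string and into a structured field, so the message stays constant and logs remain filterable on a "cluster" attribute.

// Before: printf-style message, cluster name interpolated into the string.
l.Warnf("About to dump k8s cluster: %q - Do you want to continue ? [Yes/No]", clusterName)

// After: constant message plus a structured, zap-style field.
l.Warn("About to dump k8s cluster - Do you want to continue ? [Yes/No]",
	log.String("cluster", clusterName))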
87 changes: 87 additions & 0 deletions pkg/telemetry/log/fields.go
@@ -1,6 +1,7 @@
package log

import (
"context"
"fmt"
"reflect"
"strconv"
@@ -9,8 +10,94 @@ import (
"github.com/pkg/errors"
"go.uber.org/zap"
"go.uber.org/zap/zapcore"
ddtrace "gopkg.in/DataDog/dd-trace-go.v1/ddtrace/tracer"
)

const (
FieldK8sTypeKey = "k8s_type"
FieldCountKey = "count"
FieldNodeTypeKey = "node_type"
FieldVertexTypeKey = "vertex_type"
FieldClusterKey = "cluster"
FieldComponentKey = "component"
FieldRunIDKey = "run_id"
FieldTeamKey = "team"
FieldServiceKey = "service"
FieldIngestorPipelineKey = "ingestor_pipeline"
FieldDumpPipelineKey = "dump_pipeline"
)

type contextKey int

const (
ContextFieldRunID contextKey = iota
ContextFieldCluster
)

func convertField(value any) string {
val, err := value.(string)
if !err {
return ""
}
return val
}

func SpanSetDefaultField(ctx context.Context, span ddtrace.Span) {
runID := convertField(ctx.Value(ContextFieldRunID))
if runID != "" {
span.SetTag(FieldRunIDKey, convertField(runID))
}

cluster := convertField(ctx.Value(ContextFieldCluster))
if cluster != "" {
span.SetTag(FieldClusterKey, convertField(cluster))
}
}

func FieldK8sType(k8sType string) string {
return fmt.Sprintf("%s:%s", FieldK8sTypeKey, k8sType)
}

func FieldCount(count int) string {
return fmt.Sprintf("%s:%d", FieldCountKey, count)
}

func FieldNodeType(nodeType string) string {
return fmt.Sprintf("%s:%s", FieldNodeTypeKey, nodeType)
}

func FieldVertexType(vertexType string) string {
return fmt.Sprintf("%s:%s", FieldVertexTypeKey, vertexType)
}

func FieldCluster(cluster string) string {
return fmt.Sprintf("%s:%s", FieldClusterKey, cluster)
}

func FieldComponent(component string) string {
return fmt.Sprintf("%s:%s", FieldComponentKey, component)
}

func FieldRunID(runID string) string {
return fmt.Sprintf("%s:%s", FieldRunIDKey, runID)
}

func FieldTeam(team string) string {
return fmt.Sprintf("%s:%s", FieldTeamKey, team)
}

func FieldService(service string) string {
return fmt.Sprintf("%s:%s", FieldServiceKey, service)
}

func FieldIngestorPipeline(ingestorPipeline string) string {
return fmt.Sprintf("%s:%s", FieldIngestorPipelineKey, ingestorPipeline)
}

func FieldDumpPipeline(dumpPipeline string) string {
return fmt.Sprintf("%s:%s", FieldDumpPipelineKey, dumpPipeline)
}

// Field aliased here to make it easier to adopt this package
type Field = zapcore.Field

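
A hypothetical usage sketch of the helpers added above (not part of this commit; the function name, span name, context values, and KubeHound import path are assumptions): the Field* helpers render plain "key:value" strings suitable as Datadog tags, and SpanSetDefaultField copies the run ID and cluster stored on the context onto a span.

import (
	"context"

	"github.com/DataDog/KubeHound/pkg/telemetry/log" // import path assumed
	"gopkg.in/DataDog/dd-trace-go.v1/ddtrace/tracer"
)

func dumpWithDefaultTags(ctx context.Context) {
	// Stash the default fields on the context (values are illustrative).
	ctx = context.WithValue(ctx, log.ContextFieldRunID, "01J9ZZZZ-run")
	ctx = context.WithValue(ctx, log.ContextFieldCluster, "my-cluster")

	// Start a span and copy run_id / cluster onto it as tags.
	span, ctx := tracer.StartSpanFromContext(ctx, "kubehound.collector.dump")
	defer span.Finish()
	log.SpanSetDefaultField(ctx, span)

	// The Field* helpers build "key:value" tag strings.
	tags := []string{
		log.FieldCluster("my-cluster"), // "cluster:my-cluster"
		log.FieldCount(42),             // "count:42"
	}
	_ = tags
}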
233 changes: 3 additions & 230 deletions pkg/telemetry/log/logger.go
@@ -2,12 +2,10 @@ package log

import (
"context"
"fmt"
"sync/atomic"

"go.uber.org/zap"
"go.uber.org/zap/zapcore"
ddtrace "gopkg.in/DataDog/dd-trace-go.v1/ddtrace/tracer"
)

// globalDefault contains the current global default logger and its configuration.
@@ -66,91 +64,6 @@ func Logger(ctx context.Context) LoggerI {
}
}

const (
FieldK8sType = "k8s_type"
FieldCount = "count"
FieldNodeType = "node_type"
FieldVertexType = "vertex_type"
FieldCluster = "cluster"
FieldComponent = "component"
FieldRunID = "run_id"
FieldTeam = "team"
FieldService = "service"
FieldIngestorPipeline = "ingestor_pipeline"
FieldDumpPipeline = "dump_pipeline"
)

type contextKey int

const (
ContextFieldRunID contextKey = iota
ContextFieldCluster
)

func convertTag(value any) string {
val, err := value.(string)
if !err {
return ""
}
return val
}

func SpanSetDefaultTag(ctx context.Context, span ddtrace.Span) {
runID := convertTag(ctx.Value(ContextFieldRunID))
if runID != "" {
span.SetTag(FieldRunID, convertTag(runID))
}

cluster := convertTag(ctx.Value(ContextFieldCluster))
if cluster != "" {
span.SetTag(FieldRunID, convertTag(cluster))
}
}

func TagK8sType(k8sType string) string {
return fmt.Sprintf("%s:%s", FieldK8sType, k8sType)
}

func TagCount(count int) string {
return fmt.Sprintf("%s:%d", FieldCount, count)
}

func TagNodeType(nodeType string) string {
return fmt.Sprintf("%s:%s", FieldNodeType, nodeType)
}

func TagVertexType(vertexType string) string {
return fmt.Sprintf("%s:%s", FieldVertexType, vertexType)
}

func TagCluster(cluster string) string {
return fmt.Sprintf("%s:%s", FieldCluster, cluster)
}

func TagComponent(component string) string {
return fmt.Sprintf("%s:%s", FieldComponent, component)
}

func TagRunID(runID string) string {
return fmt.Sprintf("%s:%s", FieldRunID, runID)
}

func TagTeam(team string) string {
return fmt.Sprintf("%s:%s", FieldTeam, team)
}

func TagService(service string) string {
return fmt.Sprintf("%s:%s", FieldService, service)
}

func TagIngestorPipeline(ingestorPipeline string) string {
return fmt.Sprintf("%s:%s", FieldIngestorPipeline, ingestorPipeline)
}

func TagDumpPipeline(dumpPipeline string) string {
return fmt.Sprintf("%s:%s", FieldDumpPipeline, dumpPipeline)
}

const (
spanIDKey = "dd.span_id"
traceIDKey = "dd.trace_id"
@@ -170,10 +83,12 @@ func init() {
if err != nil {
panic(err)
}
// TODO: use the env var to setup the formatter (json / text / dd ...)

cfg := &Config{
logLevel: LevelInfo,
formatter: "json",
formatter: "text",
useColour: true,
}
l := &traceLogger{
logger: newLoggerWithSkip(cfg, 1),
@@ -206,145 +121,3 @@ func newLoggerWithSkip(cfg *Config, skip int) *zapLogger {
s: logger.Sugar(),
}
}

// type LoggerOption func(*logrus.Entry) *logrus.Entry

// type LoggerConfig struct {
// Tags logrus.Fields // Tags applied to all logs.
// Mu *sync.Mutex // Lock to enable safe runtime changes.
// DD bool // Whether Datadog integration is enabled.
// }

// var globalConfig = LoggerConfig{
// Tags: logrus.Fields{
// globals.TagService: globals.DDServiceName,
// globals.TagComponent: globals.DefaultComponent,
// },
// Mu: &sync.Mutex{},
// DD: true,
// }

// I Global logger instance for use through the app
// var I = Base()

// Require our logger to append job or API related fields for easier filtering and parsing
// of logs within custom dashboards. Sticking to the "structured" log types also enables
// out of the box correlation of APM traces and log messages without the need for a custom
// index pipeline. See: https://docs.datadoghq.com/logs/log_collection/go/#configure-your-logger
// type KubehoundLogger struct {
// *logrus.Entry
// }

// // traceID retrieves the trace ID from the provided span.
// func traceID(span tracer.Span) string {
// traceID := span.Context().TraceID()

// return strconv.FormatUint(traceID, 10)
// }

// // traceID retrieves the span ID from the provided span.
// func spanID(span tracer.Span) string {
// spanID := span.Context().SpanID()

// return strconv.FormatUint(spanID, 10)
// }

// // Base returns the base logger for the application.
// func Base() *KubehoundLogger {
// logger := logrus.WithFields(globalConfig.Tags)
// logger.Logger.SetFormatter(GetLogrusFormatter())

// return &KubehoundLogger{logger}
// }

// // SetDD enables/disabled Datadog integration in the logger.
// func SetDD(enabled bool) {
// globalConfig.Mu.Lock()
// defer globalConfig.Mu.Unlock()

// globalConfig.DD = enabled

// // Replace the current logger instance to reflect changes
// I = Base()
// }

// // AddGlobalTags adds global tags to all application loggers.
// func AddGlobalTags(tags map[string]string) {
// globalConfig.Mu.Lock()
// defer globalConfig.Mu.Unlock()

// for tk, tv := range tags {
// globalConfig.Tags[tk] = tv
// }

// // Replace the current logger instance to reflect changes
// I = Base()
// }

// // WithComponent adds a component name tag to the logger.
// func WithComponent(name string) LoggerOption {
// return func(l *logrus.Entry) *logrus.Entry {
// return l.WithField(globals.TagComponent, name)
// }
// }

// // WithCollectedCluster adds a component name tag to the logger.
// func WithCollectedCluster(name string) LoggerOption {
// return func(l *logrus.Entry) *logrus.Entry {
// return l.WithField(globals.CollectedClusterComponent, name)
// }
// }

// // WithRunID adds a component name tag to the logger.
// func WithRunID(runid string) LoggerOption {
// return func(l *logrus.Entry) *logrus.Entry {
// return l.WithField(globals.RunID, runid)
// }
// }

// // Trace creates a logger from the current context, attaching trace and span IDs for use with APM.
// func Trace(ctx context.Context, opts ...LoggerOption) *KubehoundLogger {
// baseLogger := Base()

// span, ok := tracer.SpanFromContext(ctx)
// if !ok {
// return baseLogger
// }

// if !globalConfig.DD {
// return baseLogger
// }

// logger := baseLogger.WithFields(logrus.Fields{
// "dd.span_id": spanID(span),
// "dd.trace_id": traceID(span),
// })

// for _, o := range opts {
// logger = o(logger)
// }

// return &KubehoundLogger{logger}
// }

// func GetLogrusFormatter() logrus.Formatter {
// customTextFormatter := NewFilteredTextFormatter(DefaultRemovedFields)

// switch logFormat := os.Getenv("KH_LOG_FORMAT"); {
// // Datadog require the logged field to be "message" and not "msg"
// case logFormat == "dd":
// formatter := &logrus.JSONFormatter{
// FieldMap: logrus.FieldMap{
// logrus.FieldKeyMsg: "message",
// },
// }

// return formatter
// case logFormat == "json":
// return &logrus.JSONFormatter{}
// case logFormat == "text":
// return customTextFormatter
// default:
// return customTextFormatter
// }
// }
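
One possible shape for the TODO left in init() above, sketched under the assumption that the KH_LOG_FORMAT environment variable from the removed logrus-based code is kept, and in place of the hard-coded Config literal; this is illustrative only, not part of the commit.

// Sketch: derive the formatter from KH_LOG_FORMAT instead of hard-coding "text".
// Requires "os" in the import block.
formatter := "text"
if v := os.Getenv("KH_LOG_FORMAT"); v == "json" || v == "text" {
	formatter = v
}

cfg := &Config{
	logLevel:  LevelInfo,
	formatter: formatter,
	useColour: formatter == "text", // colour only makes sense for the text output
}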
