feat: enhance logging configuration and stack trace management
- Add `log_stack_trace_as` and `log_stack_trace_for` env vars
- Modify `stack_trace` function logic in `log_handler.py`
- Introduce `tracardi/logging.py` for central log config
- Update log condition checks and default bulk size handling
- Change `_get_context_object` to sync in `context.py`
atompie committed Dec 6, 2024
1 parent baf7a99 commit c27af7a
Showing 3 changed files with 25 additions and 4 deletions.
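
Editor's note: the new behaviour is driven entirely by environment variables, so enabling it is a deployment concern rather than a code change. A hedged usage sketch, assuming the variables are exported before any tracardi module is imported (the module introduced below reads them at import time); the values shown are illustrative:

import os

# Illustrative overrides: JSON-serialized stack traces, collected only for ERROR logs.
os.environ['LOG_STACK_TRACE_AS'] = 'json'     # 'string' (default) or 'json'
os.environ['LOG_STACK_TRACE_FOR'] = 'ERROR'   # comma-separated level names, default 'CRITICAL,ERROR'
os.environ['LOG_BULK_SIZE'] = '500'           # log flush threshold, default 500

# Must run after the variables are set, because tracardi/logging.py reads them at import time.
from tracardi.logging import log_stack_trace_as, log_stack_trace_for, log_bulk_size

assert log_stack_trace_as == 'json'
assert log_stack_trace_for == ['ERROR']
# log_bulk_size is expected to be 500 here, assuming get_env_as_int() parses the string to an int.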
2 changes: 2 additions & 0 deletions tracardi/config.py
@@ -284,6 +284,8 @@ def __init__(self, env):
         self.server_logging_level = _get_logging_level(
             env['SERVER_LOGGING_LEVEL']) if 'SERVER_LOGGING_LEVEL' in env else logging.WARNING
         self.skip_errors_on_profile_mapping = get_env_as_bool('SKIP_ERRORS_ON_PROFILE_MAPPING', 'no')
+        self.log_stack_trace_as = env.get('LOG_STACK_TRACE_AS', 'string')
+        self.log_stack_trace_for = env.get('LOG_STACK_TRACE_FOR', 'CRITICAL,ERROR').split(',')

         self._config = None
         self._unset_secrets()
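
Editor's note: both settings are plain string reads; LOG_STACK_TRACE_FOR is split on commas into a list of level names. A quick illustration of the parsing with made-up values:

env = {'LOG_STACK_TRACE_AS': 'json', 'LOG_STACK_TRACE_FOR': 'ERROR,WARNING'}

log_stack_trace_as = env.get('LOG_STACK_TRACE_AS', 'string')                       # 'json'
log_stack_trace_for = env.get('LOG_STACK_TRACE_FOR', 'CRITICAL,ERROR').split(',')  # ['ERROR', 'WARNING']

# With an empty env the defaults apply: 'string' and ['CRITICAL', 'ERROR'].
# Note: split(',') does not strip whitespace, so 'ERROR, WARNING' would yield [' WARNING'].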
20 changes: 16 additions & 4 deletions tracardi/exceptions/log_handler.py
@@ -7,6 +7,7 @@
 import sys

 from tracardi.context import get_context, ContextError
+from tracardi.logging import log_stack_trace_as, log_stack_trace_for, log_bulk_size
 from tracardi.service.adapter.logger.logger_adapter import log_format_adapter
 from tracardi.service.logging.tools import _get_logging_level
 from tracardi.service.utils.date import now_in_utc
@@ -18,8 +19,8 @@


 def stack_trace(level):
-    if level not in ["ERROR", "CRITICAL", "WARNING"]:
-        return []
+    if level not in log_stack_trace_for:
+        return {}

     # Extract the traceback object
     tb = sys.exc_info()[2]
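
Editor's note: the remainder of the function is collapsed in this view, so the following is only a rough sketch of what a helper like this typically does once the level check passes: walk the active exception's traceback and turn each frame into a serializable dict. The field names and overall shape are illustrative, not taken from the actual implementation.

import sys
import traceback

def stack_trace_sketch(level, log_stack_trace_for=("CRITICAL", "ERROR")):
    # Same gating as the diff: levels outside the configured list get an empty result.
    if level not in log_stack_trace_for:
        return {}

    tb = sys.exc_info()[2]   # traceback of the exception currently being handled, if any
    if tb is None:
        return {}

    # traceback.extract_tb() yields FrameSummary objects; build one dict per frame.
    return {
        "frames": [
            {"file": frame.filename, "line": frame.lineno, "function": frame.name, "code": frame.line}
            for frame in traceback.extract_tb(tb)
        ]
    }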
@@ -121,14 +122,23 @@ def emit(self, record: LogRecord):
         if record.levelno <= 25:
             return

+        _trace = stack_trace(record.levelname)
+        if log_stack_trace_as == 'json':
+            if _trace:
+                stack_trace_str = f"JSON:{json.dumps(_trace)}"
+            else:
+                stack_trace_str = None
+        else:
+            stack_trace_str = record.stack_info
+
         log = { # Maps to tracardi-log index
             "date": now_in_utc(),
             "message": record.msg,
             "logger": record.name,
             "file": record.filename,
             "line": record.lineno,
             "level": record.levelname,
-            "stack_info": json.dumps(stack_trace(record.levelname)),
+            "stack_info": stack_trace_str,
             # "exc_info": record.exc_info # Can not save this to TrackerPayload
             "module": self._get(record, "package", record.module),
             "class_name": self._get(record, "class_name", record.funcName),
@@ -142,7 +152,9 @@

         self.collection.append(log)

-    def has_logs(self, min_log_size=500):
+    def has_logs(self, min_log_size=None):
+        if min_log_size is None:
+            min_log_size = log_bulk_size
         if not isinstance(self.collection, list):
             return False
         return len(self.collection) >= min_log_size or (time() - self.last_save) > 60
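
Editor's note: taken together, the emit() change means a collected log entry carries either Python's plain stack_info text or a JSON payload tagged with a "JSON:" prefix, while has_logs() now falls back to the configurable log_bulk_size instead of the hard-coded 500. A small standalone sketch of the formatting decision, with made-up values standing in for the real record and settings:

import json

def format_stack_info(trace, stack_info_text, log_stack_trace_as='string'):
    # Mirrors the branch added to emit(): JSON mode prefixes the serialized trace,
    # otherwise the record's plain stack_info string is kept as-is.
    if log_stack_trace_as == 'json':
        return f"JSON:{json.dumps(trace)}" if trace else None
    return stack_info_text

print(format_stack_info({"frames": []}, None, log_stack_trace_as='json'))
# -> JSON:{"frames": []}
print(format_stack_info({}, 'Traceback (most recent call last): ...', log_stack_trace_as='string'))
# -> Traceback (most recent call last): ...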
7 changes: 7 additions & 0 deletions tracardi/logging.py
@@ -0,0 +1,7 @@
+import os
+
+from tracardi.service.utils.environment import get_env_as_int
+
+log_stack_trace_as = os.environ.get('LOG_STACK_TRACE_AS', 'string')
+log_stack_trace_for = os.environ.get('LOG_STACK_TRACE_FOR', 'CRITICAL,ERROR').split(',')
+log_bulk_size = get_env_as_int('LOG_BULK_SIZE', 500)
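
Editor's note: because these are module-level assignments, the values are resolved once, at import time, and shared by config.py and log_handler.py. A quick check of the defaults, assuming none of the LOG_* variables are set in the environment:

from tracardi.logging import log_stack_trace_as, log_stack_trace_for, log_bulk_size

print(log_stack_trace_as)   # 'string' -> stack traces stay in Python's text format
print(log_stack_trace_for)  # ['CRITICAL', 'ERROR']
print(log_bulk_size)        # 500 -> used as the has_logs() fallback threshold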
