From 5263e371efb21f3972a09f8cd7bdc4972f8e9898 Mon Sep 17 00:00:00 2001
From: Andreu
Date: Thu, 14 Dec 2023 11:09:18 +0100
Subject: [PATCH] Use datetime to parse CreatedDate instead of external
 dependency dateutil.

---
 requirements.txt                   | Bin 620 -> 572 bytes
 src/newrelic_logging/salesforce.py |  12 +++++++-----
 2 files changed, 7 insertions(+), 5 deletions(-)

diff --git a/requirements.txt b/requirements.txt
index 32e574e1f02190fc3bba530da47ca054aed021dd..3610aaf9b6834b44edef1d887794978bdfa611a1 100644
GIT binary patch
delta 7
OcmaFEvWI1Z4HEzi*#e&c

delta 56
zcmdnP@`h!D4U+*c0~bR9LnT8ALk2@WLmq=JLkdG8keA9(3M4Zbav16uY#EFg^cXCF
H*oXlDV@V3c

diff --git a/src/newrelic_logging/salesforce.py b/src/newrelic_logging/salesforce.py
index 378ffab..45ebbe0 100644
--- a/src/newrelic_logging/salesforce.py
+++ b/src/newrelic_logging/salesforce.py
@@ -3,7 +3,6 @@ import json
 import sys
 
 from datetime import datetime, timedelta
-from dateutil.parser import parse
 
 import jwt
 from cryptography.hazmat.primitives import serialization
@@ -309,18 +308,21 @@ def is_logfile_response(self, records):
         else:
             return True
 
-    # TODO: Use alternative timestamp attribute to avoid API limits (48 hours old)
+    # TODO: Ensure NR API limits:
+    #       - Use alternative timestamp attribute to avoid time limits (48h for Log API, 24h for Event API).
+    #       - Check attribute key and value size limits (255 and 4094 bytes respectively).
+    #       - Check max number of attributes per event (255).
     def build_log_from_event(self, records):
         log_entries = []
 
         for record in records:
             if 'CreatedDate' in record:
-                timestamp = int(parse(record['CreatedDate']).timestamp() * 1000)
+                timestamp = int(datetime.strptime(record['CreatedDate'], '%Y-%m-%dT%H:%M:%S.%f%z').timestamp() * 1000)
             else:
                 timestamp = int(datetime.datetime.now().timestamp() * 1000)
 
             log_entries.append({
-                #TODO: generate a meaningful message
+                #TODO: generate a meaningful message, maybe event type?
                 'message': "SF Event",
                 'attributes': record,
                 'timestamp': timestamp
@@ -402,7 +404,7 @@ def pack_csv_into_log(self, record, row_offset, csv_rows):
             'log_entries': log_entries
         }
 
-    # Slice CSV into smaller groups
+    # Slice CSV into smaller chunks
     def extract_csv_slice(self, csv_rows):
         part_rows = []
         i = 0