Skip to content

Commit

Permalink
Slice any request into smaller chunks. Generate better log messages.
Browse files Browse the repository at this point in the history
  • Loading branch information
asllop committed Dec 14, 2023
1 parent 5263e37 commit a2b56c4
Showing 1 changed file with 34 additions and 14 deletions.
48 changes: 34 additions & 14 deletions src/newrelic_logging/salesforce.py
Original file line number Diff line number Diff line change
Expand Up @@ -314,23 +314,43 @@ def is_logfile_response(self, records):
# - Check max number of attributes per event (255).

def build_log_from_event(self, records):
    """Split event *records* into slices and pack each slice into a log payload.

    Repeatedly pulls a chunk off *records* via extract_row_slice (which
    consumes the list in place) and packs each non-empty chunk with
    pack_event_into_log. Returns the list of packed log payloads; an
    empty *records* yields [].
    """
    logs = []
    chunk = self.extract_row_slice(records)
    while chunk:
        logs.append(self.pack_event_into_log(chunk))
        chunk = self.extract_row_slice(records)
    return logs

def pack_event_into_log(self, rows):
    """Pack one slice of Salesforce event records into a single log payload.

    Args:
        rows: list of event record dicts as returned by the Salesforce
            REST API. Each row may carry a 'CreatedDate' ISO-8601
            timestamp string and an 'attributes' dict whose 'type' key
            names the event type.

    Returns:
        dict of the form {'log_entries': [...]} with one entry per row:
        {'message': str, 'attributes': row, 'timestamp': epoch-millis}.

    Side effects:
        Removes the 'attributes' key from each row in place so it is not
        duplicated inside the entry's 'attributes' payload.
    """
    log_entries = []
    for row in rows:
        if 'CreatedDate' in row:
            created_date = row['CreatedDate']
            timestamp = int(datetime.strptime(created_date, '%Y-%m-%dT%H:%M:%S.%f%z').timestamp() * 1000)
        else:
            created_date = ""
            # Bug fix: was `datetime.datetime.now()`, which raises
            # AttributeError under `from datetime import datetime` — the
            # import style the bare `datetime.strptime` call above requires.
            timestamp = int(datetime.now().timestamp() * 1000)

        # Prefer the Salesforce event type as the message; fall back to a
        # generic label when the attributes are missing or malformed.
        message = "SF Event"
        if isinstance(row.get('attributes'), dict):
            attributes = row.pop('attributes')
            event_type = attributes.get('type')
            if isinstance(event_type, str):
                message = event_type

        if created_date != "":
            message = message + " " + created_date

        log_entries.append({
            'message': message,
            'attributes': row,
            'timestamp': timestamp
        })
    return {
        'log_entries': log_entries
    }

def build_log_from_logfile(self, session, record):
record_file_name = record['LogFile']
record_id = str(record['Id'])
Expand All @@ -357,7 +377,7 @@ def build_log_from_logfile(self, session, record):
logs = []
row_offset = 0
while True:
part_rows = self.extract_csv_slice(csv_rows)
part_rows = self.extract_row_slice(csv_rows)
part_rows_len = len(part_rows)
if part_rows_len > 0:
logs.append(self.pack_csv_into_log(record, row_offset, part_rows))
Expand Down Expand Up @@ -404,12 +424,12 @@ def pack_csv_into_log(self, record, row_offset, csv_rows):
'log_entries': log_entries
}

# Slice record into smaller chunks
def extract_row_slice(self, rows):
    """Remove up to CSV_SLICE_SIZE trailing rows from *rows* and return them.

    Mutates *rows* in place (pops from the end), so successive calls
    consume the list chunk by chunk; returns [] once *rows* is empty.
    """
    chunk = []
    for _ in range(CSV_SLICE_SIZE):
        if not rows:
            break
        chunk.append(rows.pop())
    return chunk
Expand Down

0 comments on commit a2b56c4

Please sign in to comment.