diff --git a/Solutions/Oracle Cloud Infrastructure/Data Connectors/AzureFunctionOCILogs/main.py b/Solutions/Oracle Cloud Infrastructure/Data Connectors/AzureFunctionOCILogs/main.py index 94aef01a2a3..10fc9417eac 100644 --- a/Solutions/Oracle Cloud Infrastructure/Data Connectors/AzureFunctionOCILogs/main.py +++ b/Solutions/Oracle Cloud Infrastructure/Data Connectors/AzureFunctionOCILogs/main.py @@ -17,7 +17,7 @@ StreamOcid = os.environ['StreamOcid'] WORKSPACE_ID = os.environ['AzureSentinelWorkspaceId'] SHARED_KEY = os.environ['AzureSentinelSharedKey'] -LOG_TYPE = 'OCI_Logs' +LOG_TYPE = 'OCI_LogsV2' CURSOR_TYPE = os.getenv('CursorType', 'group') MAX_SCRIPT_EXEC_TIME_MINUTES = 5 PARTITIONS = os.getenv('Partition',"0") @@ -41,7 +41,7 @@ def main(mytimer: func.TimerRequest): config = get_config() oci.config.validate_config(config) - sentinel_connector = AzureSentinelConnector(LOG_ANALYTICS_URI, WORKSPACE_ID, SHARED_KEY, LOG_TYPE, queue_size=2000) + sentinel_connector = AzureSentinelConnector(LOG_ANALYTICS_URI, WORKSPACE_ID, SHARED_KEY, queue_size=2000) stream_client = oci.streaming.StreamClient(config, service_endpoint=MessageEndpoint) @@ -53,6 +53,23 @@ def main(mytimer: func.TimerRequest): process_events(stream_client, StreamOcid, cursor, limit, sentinel_connector, start_ts) logging.info(f'Function finished. Sent events {sentinel_connector.successfull_sent_events_number}.') + +def determine_log_type(event): + """ + Determine the Azure Sentinel log type based on the event type. 
+ """ + # event_type = event.get("type", "default") + if event["type"] == "com.oraclecloud.loadbalancer.access" or event["type"] == "com.oraclecloud.loadbalancer.error" or event["type"]=="com.oraclecloud.OraLB-API.ListLoadBalancers": + return "OCI_LoadBalancerLogs" + # elif event_type == "com.oraclecloud.loadbalancer.error": + # return "OCI_LoadBalancerLogs" + if event["type"] == "com.oraclecloud.Audit.ListEvents": + return "OCI_AuditLogs" + if event["type"] == "com.oraclecloud.vcn.flowlogs.DataEvent" or event["type"] == "com.oraclecloud.vcn.flowlogs.QualityEvent.NoData" or event["type"] == "com.oraclecloud.virtualNetwork.GetVcn" or event["type"] == "com.oraclecloud.virtualNetwork.ListVcns" or event["type"] == "com.oraclecloud.vcn.flowlogs.QualityEvent.SkipData" or event["type"] == "com.oraclecloud.virtualNetwork.GetVcnDnsResolverAssociation": + return "OCI_VirtualNetworkLogs" + else: + return "OCI_LogsV2" # Default log type + def parse_key(key_input): try: begin_line = re.search(r'-----BEGIN [A-Z ]+-----', key_input).group() @@ -127,6 +144,10 @@ def process_events(client: oci.streaming.StreamClient, stream_id, initial_cursor #if event != 'ok' and event != 'Test': event = json.loads(event) if "data" in event: + log_type = determine_log_type(event) + logging.info( + '{} Log type value after determining the log type'.format(log_type)) + sentinel.log_type = log_type if "request" in event["data"] and event["type"] != "com.oraclecloud.loadbalancer.access": if event["data"]["request"] is not None and "headers" in event["data"]["request"]: event["data"]["request"]["headers"] = json.dumps(event["data"]["request"]["headers"]) @@ -143,7 +164,7 @@ def process_events(client: oci.streaming.StreamClient, stream_id, initial_cursor if event["data"]["stateChange"] is not None and "current" in event["data"]["stateChange"] : event["data"]["stateChange"]["current"] = json.dumps( event["data"]["stateChange"]["current"]) - sentinel.send(event) + sentinel.send(event, log_type) 
sentinel.flush() if check_if_script_runs_too_long(start_ts): diff --git a/Solutions/Oracle Cloud Infrastructure/Data Connectors/AzureFunctionOCILogs/sentinel_connector.py b/Solutions/Oracle Cloud Infrastructure/Data Connectors/AzureFunctionOCILogs/sentinel_connector.py index ee8b2b5a254..635f6c0a064 100644 --- a/Solutions/Oracle Cloud Infrastructure/Data Connectors/AzureFunctionOCILogs/sentinel_connector.py +++ b/Solutions/Oracle Cloud Infrastructure/Data Connectors/AzureFunctionOCILogs/sentinel_connector.py @@ -9,20 +9,20 @@ class AzureSentinelConnector: - def __init__(self, log_analytics_uri, workspace_id, shared_key, log_type, queue_size=200, queue_size_bytes=25 * (2**20)): + def __init__(self, log_analytics_uri, workspace_id, shared_key, queue_size=200, queue_size_bytes=25 * (2**20)): self.log_analytics_uri = log_analytics_uri self.workspace_id = workspace_id self.shared_key = shared_key - self.log_type = log_type self.queue_size = queue_size - self.bulks_number = 1 self.queue_size_bytes = queue_size_bytes + self.bulks_number = 1 self._queue = [] self._bulks_list = [] self.successfull_sent_events_number = 0 + self.failed_sent_events_number = 0 - def send(self, event): - self._queue.append(event) + def send(self, event, log_type): + self._queue.append((event, log_type)) if len(self._queue) >= self.queue_size: self.flush(force=False) @@ -39,12 +39,22 @@ def flush(self, force=True): def _flush_bulks(self): for queue in self._bulks_list: if queue: - queue_list = self._split_big_request(queue) - for q in queue_list: - self._post_data(self.workspace_id, self.shared_key, q, self.log_type) - + grouped_events = self._group_events_by_log_type(queue) + for log_type, events in grouped_events.items(): + queue_list = self._split_big_request(events) + for q in queue_list: + self._post_data(self.workspace_id, + self.shared_key, q, log_type) self._bulks_list = [] + def _group_events_by_log_type(self, queue): + grouped_events = {} + for event, log_type in queue: + if log_type 
not in grouped_events: + grouped_events[log_type] = [] + grouped_events[log_type].append(event) + return grouped_events + def is_empty(self): return not self._queue and not self._bulks_list @@ -91,18 +101,18 @@ def _post_data(self, workspace_id, shared_key, body, log_type): time.sleep(try_number) try_number += 1 else: - logging.error(str(err)) + logging.error(f"Failed after retries. Error: {err}") self.failed_sent_events_number += events_number raise err else: - logging.info('{} events have been successfully sent to Azure Sentinel'.format(events_number)) + logging.info(f"{events_number} events successfully sent with log type: {log_type}") self.successfull_sent_events_number += events_number break def _make_request(self, uri, body, headers): response = requests.post(uri, data=body, headers=headers) if not (200 <= response.status_code <= 299): - raise Exception("Error during sending events to Azure Sentinel. Response code: {}".format(response.status)) + raise Exception(f"Error sending events to Azure Sentinel. 
Response code: {response.status_code}") def _check_size(self, queue): data_bytes_len = len(json.dumps(queue).encode()) diff --git a/Solutions/Oracle Cloud Infrastructure/Data Connectors/OCILogsConn.zip b/Solutions/Oracle Cloud Infrastructure/Data Connectors/OCILogsConn.zip index 7958dd5c045..74ee3f97c7c 100644 Binary files a/Solutions/Oracle Cloud Infrastructure/Data Connectors/OCILogsConn.zip and b/Solutions/Oracle Cloud Infrastructure/Data Connectors/OCILogsConn.zip differ diff --git a/Solutions/Oracle Cloud Infrastructure/Parsers/OCILogs.yaml b/Solutions/Oracle Cloud Infrastructure/Parsers/OCILogs.yaml index f06a071e05f..ef650956a25 100644 --- a/Solutions/Oracle Cloud Infrastructure/Parsers/OCILogs.yaml +++ b/Solutions/Oracle Cloud Infrastructure/Parsers/OCILogs.yaml @@ -7,7 +7,7 @@ Category: Microsoft Sentinel Parser FunctionName: OCILogs FunctionAlias: OCILogs FunctionQuery: | - OCI_Logs_CL + let OCI_Logs = view () { union isfuzzy=true OCI_LoadBalancerLogs_CL, OCI_AuditLogs_CL, OCI_VirtualNetworkLogs_CL, OCI_ComputeInstanceLogs_CL, OCI_LogsV2_CL, OCI_Logs_CL | extend EventVendor = 'Oracle' | extend EventProduct = 'Oracle Cloud Infrastructure' | extend EventStartTime = coalesce(unixtime_seconds_todatetime(column_ifexists("data_startTime_d", long(null))), time_t) @@ -26,4 +26,6 @@ FunctionQuery: | | project-rename EventMessage=data_message_s | project-rename HttpUserAgentOriginal=data_identity_userAgent_s | project-rename HttpStatusCode=data_response_status_s - | project-rename HttpRequestMethod=data_request_action_s \ No newline at end of file + | project-rename HttpRequestMethod=data_request_action_s + }; + OCI_Logs() \ No newline at end of file