Merge pull request #9317 from Azure/CrowdStrikeV2
CrowdStrike v2 fixes
v-atulyadav authored Nov 6, 2023
2 parents 53c8f2e + ddad6eb commit e7b4e5a
Showing 9 changed files with 106 additions and 1,387 deletions.

@@ -133,8 +133,8 @@
"name": "CrowdStrike_AWS_Key",
"type": "Microsoft.Common.TextBox",
"label": "AWS Access Key ID",
"placeholder": "AKIARJFBAG3EGHFG2FPN",
"toolTip": "Enter valid AWS Key Id. For example AKIARKFBAG3EGIFG9FPN",
"placeholder": "AKIPRJFBAG3EGHFG2FPN",
"toolTip": "Enter valid AWS Key Id. For example AKIPRJFBAG3EGHFG2FPN",
"constraints": {
"required": true,
"regex": "([A-Z0-9+/]{20})",
@@ -146,8 +146,8 @@
"name": "CrowdStrike_AWS_Secret",
"type": "Microsoft.Common.TextBox",
"label": "AWS Secret Access Key",
"placeholder": "Js6IDrwAIkvSY+8fSJ5bcep05ENlNvXgc+JRRr7Y",
"toolTip": "Enter valid AWS Secret key. For example. For example Js6IDrpAIkvSS+8fSK5bcep05EMlNvXgc+JRRr7Y ",
"placeholder": "Js6IDopAIkvSY+8fSJ5bcep05ENlNvXgc+JRRr7Y",
"toolTip": "Enter valid AWS Secret key. For example. For example Js6IDopAIkvSY+8fSK5bcep05EMlNvXgc+JRRr7Y ",
"constraints": {
"required": true,
"regex": "([a-zA-Z0-9+/]{40})",
@@ -192,8 +192,8 @@
"name": "AADTenantId",
"type": "Microsoft.Common.TextBox",
"label": "AAD Tenant Id",
"placeholder": "72f988bf-86f1-41af-91ab-2d7cd011db47",
"toolTip": "If you dont have AAD application created, create one by following [instructions provided here](https://learn.microsoft.com/en-us/azure/azure-monitor/logs/tutorial-logs-ingestion-portal#create-azure-ad-application).Copy Tenant Id and enter here. For example: 72f988bf-86f1-41af-91ab-2d7cd011db47",
"placeholder": "87f988bf-86f1-41af-91ab-2d7cd011db47",
"toolTip": "If you dont have AAD application created, create one by following [instructions provided here](https://learn.microsoft.com/en-us/azure/azure-monitor/logs/tutorial-logs-ingestion-portal#create-azure-ad-application).Copy Tenant Id and enter here. For example: 87f988bf-86f1-41af-91ab-2d7cd011db47",
"constraints": {
"required": true,
"regex": "(^[{]?[0-9a-fA-F]{8}-([0-9a-fA-F]{4}-){3}[0-9a-fA-F]{12}[}]?$)",
@@ -205,8 +205,8 @@
"name": "AADApplicationId",
"type": "Microsoft.Common.TextBox",
"label": "AAD App (client) Id",
"placeholder": "969f7b17-415f-4d01-8ab5-7a7db3aa39cb",
"toolTip": "If you dont have AAD application created, create one by following [instructions provided here](https://learn.microsoft.com/en-us/azure/azure-monitor/logs/tutorial-logs-ingestion-portal#create-azure-ad-application).Copy Application Id and enter here. For example: 969f7b17-415f-4d01-8ab5-7a7db3aa39cb",
"placeholder": "899f7b17-415f-4d01-8ab5-7a7db3aa39cb",
"toolTip": "If you dont have AAD application created, create one by following [instructions provided here](https://learn.microsoft.com/en-us/azure/azure-monitor/logs/tutorial-logs-ingestion-portal#create-azure-ad-application).Copy Application Id and enter here. For example: 899f7b17-415f-4d01-8ab5-7a7db3aa39cb",
"constraints": {
"required": true,
"regex": "(^[{]?[0-9a-fA-F]{8}-([0-9a-fA-F]{4}-){3}[0-9a-fA-F]{12}[}]?$)",
@@ -218,8 +218,8 @@
"name": "AADPrincipalId",
"type": "Microsoft.Common.TextBox",
"label": "AAD Principal Id",
"placeholder": "69925b10-2dc5-4b1e-9340-ba6b993b82dd",
"toolTip": "If you dont have AAD application created, create one by following [instructions provided here](https://learn.microsoft.com/en-us/azure/azure-monitor/logs/tutorial-logs-ingestion-portal#create-azure-ad-application).Copy Object Id of AAD app from [AAD Portal](https://aad.portal.azure.com/#view/Microsoft_AAD_IAM/StartboardApplicationsMenuBlade/~/AppAppsPreview/menuId/) and enter here. For example: 69925b10-2dc5-4b1e-9340-ba6b993b82dd",
"placeholder": "61125b10-2dc5-4b1e-9340-ba6b993b82dd",
"toolTip": "If you dont have AAD application created, create one by following [instructions provided here](https://learn.microsoft.com/en-us/azure/azure-monitor/logs/tutorial-logs-ingestion-portal#create-azure-ad-application).Copy Object Id of AAD app from [AAD Portal](https://aad.portal.azure.com/#view/Microsoft_AAD_IAM/StartboardApplicationsMenuBlade/~/AppAppsPreview/menuId/) and enter here. For example: 61125b10-2dc5-4b1e-9340-ba6b993b82dd",
"constraints": {
"required": true,
"regex": "(^[{]?[0-9a-fA-F]{8}-([0-9a-fA-F]{4}-){3}[0-9a-fA-F]{12}[}]?$)",
@@ -251,12 +251,12 @@
"name": "Expected_EPS_volume",
"type": "Microsoft.Common.Slider",
"min": 1,
"max": 120000,
"max": 150000,
"label": "CrowdStrike Ingestion EPS (Approx)",
"subLabel": "EPS",
"defaultValue": 40000,
"defaultValue": 50000,
"showStepMarkers": false,
"toolTip": "Pick the expected ingestion EPS for this connector. We use this to determine the function app plan that requires to handles this workloads",
"toolTip": "Pick the expected ingestion EPS for this connector. We use this to determine the function app plan that requires to handles this workloads. This is just an indication and we dont charge you based on this.",
"constraints": {
"required": true
},
@@ -265,29 +265,20 @@
{
"name": "FunctionAppSelectedPlanConsumption",
"type": "Microsoft.Common.InfoBox",
"visible": "[lessOrEquals(steps('AzureFunctionsAppConfig').CrowdStrikeEPSSection.Expected_EPS_volume,40000)]",
"visible": "[lessOrEquals(steps('AzureFunctionsAppConfig').CrowdStrikeEPSSection.Expected_EPS_volume,100000)]",
"options": {
"icon": "Info",
"text": "Based on the EPS you have selected above, we are auto selecting <b>Consumption</b> plan for azure functions. To know more details about different plans, you can visit https://learn.microsoft.com/en-us/azure/azure-functions/functions-scale#overview-of-plans"
"text": "Based on the EPS you have selected above, we are auto selecting <b>Consumption</b> plan for azure functions. To deploy EP1 instance (purely based on your need), select EPS > 100,000. To know more details about different plans, you can visit https://learn.microsoft.com/en-us/azure/azure-functions/functions-scale#overview-of-plans"
}
},
{
"name": "FunctionAppSelectedPlanEP1",
"type": "Microsoft.Common.InfoBox",
"visible": "[and(greater(steps('AzureFunctionsAppConfig').CrowdStrikeEPSSection.Expected_EPS_volume,40000),lessOrEquals(steps('AzureFunctionsAppConfig').CrowdStrikeEPSSection.Expected_EPS_volume,60000))]",
"visible": "[and(greater(steps('AzureFunctionsAppConfig').CrowdStrikeEPSSection.Expected_EPS_volume,100000),lessOrEquals(steps('AzureFunctionsAppConfig').CrowdStrikeEPSSection.Expected_EPS_volume,150000))]",
"options": {
"icon": "Info",
"text": "Based on the EPS you have selected above, we are auto selecting <b>Elastic Premium EP1</b> plan for azure functions. Creating multiple instances with lower EPS is a recommended and cost effective solution. To create multiple instances, redeploy the data connector with increamented Instance number. To know more details about different plans, you can visit https://learn.microsoft.com/en-us/azure/azure-functions/functions-scale#overview-of-plans"
}
},
- {
- "name": "FunctionAppSelectedPlanEP2",
- "type": "Microsoft.Common.InfoBox",
- "visible": "[greater(steps('AzureFunctionsAppConfig').CrowdStrikeEPSSection.Expected_EPS_volume,60000)]",
- "options": {
- "icon": "Info",
- "text": "Based on the EPS you have selected above, we are auto selecting <b>Elastic Premium EP2</b> plan for azure functions. Creating multiple instances with lower EPS is a recommended and cost effective solution. To create multiple instances, redeploy the data connector with increamented Instance number. To know more details about different plans, you can visit https://learn.microsoft.com/en-us/azure/azure-functions/functions-scale#overview-of-plans"
- }
- }
]
}
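Taken together, the changes above remove the EP2 tier entirely, raise the slider maximum to 150,000 EPS, and leave a two-way rule in the `visible` expressions: up to 100,000 EPS auto-selects the Consumption plan, and anything higher auto-selects Elastic Premium EP1. A small sketch of that rule (the function and constant names are illustrative, not part of the template):

```python
CONSUMPTION_MAX_EPS = 100_000  # threshold used by the 'visible' expressions above
SLIDER_MAX_EPS = 150_000       # slider "max" after this change

def select_plan(expected_eps: int) -> str:
    """Mirror the plan auto-selection described by the two info boxes."""
    if not 1 <= expected_eps <= SLIDER_MAX_EPS:
        raise ValueError("EPS must be within the slider range 1..150000")
    return "Consumption" if expected_eps <= CONSUMPTION_MAX_EPS else "Elastic Premium EP1"

assert select_plan(50_000) == "Consumption"          # the new default EPS
assert select_plan(120_000) == "Elastic Premium EP1"
```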
Binary file not shown.
@@ -13,10 +13,11 @@
import time
from datetime import datetime

+ QUEUE_URL = os.environ['QUEUE_URL']

AWS_KEY = os.environ['AWS_KEY']
AWS_SECRET = os.environ['AWS_SECRET']
AWS_REGION_NAME = os.environ['AWS_REGION_NAME']
- AZURE_TENANT_ID = os.environ['AZURE_TENANT_ID']
LINE_SEPARATOR = os.environ.get('lineSeparator', '[\n\r\x0b\v\x0c\f\x1c\x1d\x85\x1e\u2028\u2029]+')
+ AZURE_TENANT_ID = os.environ['AZURE_TENANT_ID']
AZURE_CLIENT_ID = os.environ['AZURE_CLIENT_ID']
@@ -28,6 +29,7 @@
NORMALIZED_SCHEMA_NAMES = '{"Dns": "Custom-CrowdstrikeDns","File": "Custom-CrowdstrikeFile","Process": "Custom-CrowdstrikeProcess","Network": "Custom-CrowdstrikeNetwork","Auth": "Custom-CrowdstrikeAuth","Registry": "Custom-CrowdstrikeRegistry","Audit": "Custom-CrowdstrikeAudit","User": "Custom-CrowdstrikeUser","Additional": "Custom-CrowdstrikeAdditional"}'
CUSTOM_SCHEMA_NAMES = '{"Dns": "Custom-CrowdstrikeDns","File": "Custom-CrowdstrikeFile","Process": "Custom-CrowdstrikeProcess","Network": "Custom-CrowdstrikeNetwork","Auth": "Custom-CrowdstrikeAuth","Registry": "Custom-CrowdstrikeRegistry","Audit": "Custom-CrowdstrikeAudit","User": "Custom-CrowdstrikeUser"}'
REQUIRE_RAW_STRING = os.environ.get('USER_SELECTION_REQUIRE_RAW', 'false')
+ REQUIRE_SECONDARY_STRING = os.environ.get('USER_SELECTION_REQUIRE_SECONDARY', 'false')
SECONDARY_DATA_SCHEMA = "Custom-CrowdStrikeSecondary"
EVENT_TO_TABLE_MAPPING_LINK = os.environ.get('EVENT_TO_TABLE_MAPPING_LINK', 'https://aka.ms/CrowdStrikeEventsToTableMapping')
REQUIRED_FIELDS_SCHEMA_LINK = os.environ.get('REQUIRED_FIELDS_SCHEMA_LINK', 'https://aka.ms/CrowdStrikeRequiredFieldsSchema')
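Note that NORMALIZED_SCHEMA_NAMES and CUSTOM_SCHEMA_NAMES are JSON strings, and the USER_SELECTION_REQUIRE_* settings arrive as 'true'/'false' text, so both need parsing before use. A minimal sketch of that parsing (the helper names are hypothetical; the committed code may do this elsewhere):

```python
import json
import os

def env_json(name: str, default: str) -> dict:
    """Parse a JSON-valued app setting into a dict."""
    return json.loads(os.environ.get(name, default))

def env_flag(name: str, default: str = "false") -> bool:
    """Interpret a 'true'/'false' app setting as a bool."""
    return os.environ.get(name, default).strip().lower() == "true"

schema_map = env_json("NORMALIZED_SCHEMA_NAMES", '{"Dns": "Custom-CrowdstrikeDns"}')
require_secondary = env_flag("USER_SELECTION_REQUIRE_SECONDARY")
```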
Expand All @@ -50,8 +52,7 @@ def _create_s3_client():
)

async def main(msg: func.QueueMessage) -> None:
- logging.info('Starting script')
- logging.info("Required Raw String - {}".format(REQUIRE_RAW))
+ logging.info("Starting script. Parameter Selection- REQUIRE_RAW_STRING: {} REQUIRE_SECONDARY_STRING: {} AZURE_TENANT_ID: {} AZURE_CLIENT_ID: {} AZURE_CLIENT_SECRET: ItsASecret AWS_KEY: {} AWS_REGION_NAME: {} AWS_SECRET: IWontReveal NORMALIZED_DCE_ENDPOINT: {} RAW_DATA_DCE_ENDPOINT: {} NORMALIZED_DCR_ID: {} RAW_DATA_DCR_ID: {} ".format(REQUIRE_RAW_STRING, REQUIRE_SECONDARY_STRING, AZURE_TENANT_ID, AZURE_CLIENT_ID, AWS_KEY, AWS_REGION_NAME, NORMALIZED_DCE_ENDPOINT, RAW_DATA_DCE_ENDPOINT, NORMALIZED_DCR_ID, RAW_DATA_DCR_ID))
link = ""
bucket = ""
messageId = ""
@@ -64,7 +65,7 @@ async def main(msg: func.QueueMessage) -> None:
bucket = req_body.get('bucket')
messageId = req_body.get('messageId')

logging.info("Processing {} file from {} bucket of {} messageId".format(link,bucket,messageId))
logging.info("Information received from Azure Storage queue. S3file: {} S3Bucket: {} SQSMessageId: {}".format(link,bucket,messageId))

eventsSchemaMapping = FileHelper(
EVENT_TO_TABLE_MAPPING_LINK,
@@ -83,15 +84,19 @@
async with _create_s3_client() as client:
async with aiohttp.ClientSession() as session:
if link:
logging.info("Processing file {}".format(link))
try:
if "fdrv2/" in link:
- logging.info('Processing a secondary data bucket.')
+ logging.info("Started processing a secondary data fdrv2 bucket. S3file: {} S3Bucket: {} SQSMessageId: {}".format(link,bucket,messageId))
await process_file_secondary_CLv2(bucket, link, client, session)
+ logging.info("Finished processing a secondary data fdrv2 bucket. S3file: {} S3Bucket: {} SQSMessageId: {}".format(link,bucket,messageId))
+
else:
logging.info("Started processing data bucket. S3file: {} S3Bucket: {} SQSMessageId: {}".format(link,bucket,messageId))
await process_file_primary_CLv2(bucket, link, client, session, eventsSchemaMappingDict, requiredFieldsMappingDict)
logging.info("Finished processing data bucket. S3file: {} S3Bucket: {} SQSMessageId: {}".format(link,bucket,messageId))

except Exception as e:
- logging.error('Error while processing bucket {}. Error: {}'.format(link, str(e)))
+ logging.error('Error while processing S3file: {} S3Bucket: {} SQSMessageId: {}. Error: {}'.format(link, bucket, messageId, str(e)))
raise e

# This method customizes the data before ingestion. Both normalized and raw data is returned from this method.
@@ -165,7 +170,7 @@ def customize_event(line, eventsSchemaMappingDict, requiredFieldsMappingDict, re
# eventsSchemaMappingDict : Dictionary
# requiredFieldsMappingDict : Dictionary
async def process_file_primary_CLv2(bucket, s3_path, client, session, eventsSchemaMappingDict, requiredFieldsMappingDict):
logging.info("Start processing bucket {}".format(s3_path))
logging.debug("Inside method - process_file_primary_CLv2. Started processing S3file: {} S3Bucket: {}".format(s3_path, bucket))
normalizedSentinelHelperCollection = SentinelHelperCollection(session,
eventsSchemaMappingDict,
NORMALIZED_DCE_ENDPOINT,
@@ -181,16 +186,16 @@ async def process_file_primary_CLv2(bucket, s3_path, client, session, eventsSche
)

try:
logging.info("Making request to AWS for downloading file started time: {} ".format(datetime.now()))
logging.info("Making request to AWS for downloading file startTime: {} S3file: {} S3Bucket: {}".format(datetime.now(), s3_path, bucket))
response = await client.get_object(Bucket=bucket, Key=s3_path)
response_body_size = sys.getsizeof(response["Body"])
logging.info("downloaded S3 file: {} of size: {} from AWS S3 successfully time: {} ".format(s3_path, response_body_size,datetime.now()))
logging.info("Download from AWS completed. S3file: {} S3Bucket: {} size: {} from AWS S3 successfully time: {} ".format(s3_path, bucket, response_body_size,datetime.now()))
s = ''
async for decompressed_chunk in AsyncGZIPDecompressedStream(response["Body"]):
#logging.info("Inside AsyncGZIPDecompressedStream time: {} ".format(datetime.now()))
logging.debug("Inside AsyncGZIPDecompressedStream time: {} ".format(datetime.now()))
s += decompressed_chunk.decode(errors='ignore')
lines = re.split(r'{0}'.format(LINE_SEPARATOR), s)
#logging.info("Inside AsyncGZIPDecompressedStream File: {} downloaded and length: {} ".format(s3_path,len(lines)))
logging.debug("Inside AsyncGZIPDecompressedStream File: {} downloaded and length: {} ".format(s3_path,len(lines)))
for n, line in enumerate(lines):
if n < len(lines) - 1:
if line:
@@ -227,9 +232,9 @@
else:
custom_total_events_success, custom_total_events_failure = 0,0

logging.info("Finish processing file {} with {} normalized events and {} custom events.".format(s3_path,normalized_total_events_success,custom_total_events_success))
logging.info("Finish processing S3file: {} S3Bucket: {} SuccessNormalizedEventsCount: {} and SuccessRawDataEventsCount: {}".format(s3_path,bucket,normalized_total_events_success,custom_total_events_success))
if normalized_total_events_failure or custom_total_events_failure:
logging.info("Failure in {}: {} normalized events failed and {} custom events failed.".format(s3_path,normalized_total_events_failure,custom_total_events_failure))
logging.info("Failure in processing S3file: {} S3Bucket: {} FailedNormalizedEventsCount: {} FailedRawDataEventsCount:{} ".format(s3_path, bucket, normalized_total_events_failure,custom_total_events_failure))

except Exception as e:
logging.warn("Processing file {} was failed. Error: {}".format(s3_path,e))
@@ -240,7 +245,7 @@
# client : s3_session client
# session : aiohttp session
async def process_file_secondary_CLv2(bucket, s3_path, client, session):
logging.info("Start processing bucket {}".format(s3_path))
logging.debug("Inside method - process_file_secondary_CLv2. Started processing S3file: {} S3Bucket: {}".format(s3_path, bucket))
AzureSentinelConnector = AzureSentinelConnectorCLv2Async(session, NORMALIZED_DCE_ENDPOINT, NORMALIZED_DCR_ID, SECONDARY_DATA_SCHEMA,
AZURE_CLIENT_ID, AZURE_CLIENT_SECRET, AZURE_TENANT_ID)

@@ -279,12 +284,13 @@ async def process_file_secondary_CLv2(bucket, s3_path, client, session):
total_events_success = AzureSentinelConnector.get_success_count()
total_events_failure = AzureSentinelConnector.get_failure_count()

logging.info("Finish processing file {} with {} secondary events.".format(s3_path,total_events_success))
logging.info("Finish processing Secondary data S3file: {} S3Bucket: {} SuccessEventsCount: {} ".format(s3_path,bucket,total_events_success))
if total_events_failure:
logging.info("Failure in {} : {} secondary events failed".format(s3_path,total_events_failure))
logging.info("Failure in processing Secondary data S3file: {} S3Bucket: {} FailureEventsCount: {} ".format(s3_path,bucket,total_events_failure))


except Exception as e:
logging.warn("Processing file {} was failed. Error: {}".format(s3_path,e))
logging.warn("Failed processing file S3File: {} S3Bucket: {} - Error: {}".format(s3_path,bucket,e))
raise e

class FileHelper:
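For context on the trigger side: main() is bound to an Azure Storage queue, and each message body carries the S3 object key (`link`), the bucket name, and the originating SQS `messageId`; an "fdrv2/" prefix in the key is what routes a file to the secondary-data path. A hedged sketch of composing such a message for local testing (all names and values are illustrative):

```python
import base64
import json

# Shape of the queue payload that main() reads via req_body.get(...).
message = {
    "link": "fdrv2/aidmaster/part-0001.gz",               # illustrative S3 key
    "bucket": "example-crowdstrike-fdr-bucket",           # illustrative bucket
    "messageId": "00000000-0000-0000-0000-000000000000",  # illustrative SQS id
}

# Azure Functions queue triggers expect Base64-encoded message text by default.
print(base64.b64encode(json.dumps(message).encode()).decode())
```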