Skip to content

Add command to query GP logs to SLS integration #38876

New issue

Have a question about this project? Sign up for a free GitHub account to open an issue and contact its maintainers and the community.

By clicking “Sign up for GitHub”, you agree to our terms of service and privacy statement. We’ll occasionally send you account related emails.

Already on GitHub? Sign in to your account

Merged
Show file tree
Hide file tree
Changes from all commits
Commits
Show all changes
48 commits
Select commit Hold shift + click to select a range
39068d1
update with new command
ali-sawyer Mar 3, 2025
50ab187
fix validation issues
ali-sawyer Mar 3, 2025
d57c7d7
update docs and RNs
ali-sawyer Mar 3, 2025
1b7b072
increment docker img version
ali-sawyer Mar 3, 2025
53f3713
add docker update note to RNs
ali-sawyer Mar 3, 2025
9c08a01
Update Packs/CortexDataLake/Integrations/CortexDataLake/CortexDataLak…
ali-sawyer Mar 6, 2025
276b80f
Update Packs/CortexDataLake/Integrations/CortexDataLake/CortexDataLak…
ali-sawyer Mar 6, 2025
466f57c
address review comments
ali-sawyer Mar 6, 2025
f3f7b1c
revert unintentional change
ali-sawyer Mar 6, 2025
825a3ec
add section settings
ali-sawyer Mar 6, 2025
cd0ad03
fix integration name in RNs
ali-sawyer Mar 6, 2025
f5b819d
change version number
ali-sawyer Mar 10, 2025
2d81d62
update README
ali-sawyer Mar 11, 2025
319fba5
add unit test
ali-sawyer Mar 11, 2025
0f7ca69
fix indentation error
ali-sawyer Mar 11, 2025
9316fff
add start_time to unit test
ali-sawyer Mar 11, 2025
3d66447
fix unit test issues
ali-sawyer Mar 11, 2025
b66290b
change test file timestamps
ali-sawyer Mar 11, 2025
193c27b
change outputs slightly after getting access to lab env
ali-sawyer Mar 21, 2025
63f762e
Merge branch 'contrib/ali-sawyer_sls-add-gp-command' into sls-add-gp-…
ali-sawyer Mar 21, 2025
7eb0f53
increment RN number
ali-sawyer Mar 21, 2025
5db0126
add stage arg
ali-sawyer Mar 21, 2025
1012840
fix name in YML
ali-sawyer Mar 24, 2025
c55c07c
revert indentation
ali-sawyer Mar 31, 2025
051dd34
Update arg description
ali-sawyer Apr 2, 2025
8466e74
Update arg description
ali-sawyer Apr 2, 2025
3053f0e
Update arg description
ali-sawyer Apr 2, 2025
adea296
Update arg description
ali-sawyer Apr 2, 2025
be55c42
Update arg description
ali-sawyer Apr 2, 2025
749f57e
Update arg descriptions
ali-sawyer Apr 2, 2025
5d5806b
Update arg description
ali-sawyer Apr 2, 2025
85c940b
Update arg description
ali-sawyer Apr 2, 2025
5aa2153
Update arg description
ali-sawyer Apr 2, 2025
7845f0e
Update README
ali-sawyer Apr 2, 2025
2baf450
Update README
ali-sawyer Apr 2, 2025
cb92fb0
Update README
ali-sawyer Apr 2, 2025
2c1e7f6
Update RNs
ali-sawyer Apr 2, 2025
16543b3
Update README
ali-sawyer Apr 2, 2025
014c364
Update arg description
ali-sawyer Apr 2, 2025
ee8e9e5
Update arg description
ali-sawyer Apr 2, 2025
9a6de68
Update arg description
ali-sawyer Apr 2, 2025
73c9d87
Update README
ali-sawyer Apr 2, 2025
51dfbc5
Update README
ali-sawyer Apr 2, 2025
2784e5c
Update README
ali-sawyer Apr 2, 2025
f2cab89
Update README
ali-sawyer Apr 2, 2025
5015844
Update README
ali-sawyer Apr 2, 2025
7a4379e
Update README
ali-sawyer Apr 2, 2025
a94b575
Update README
ali-sawyer Apr 2, 2025
File filter

Filter by extension

Filter by extension

Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
208 changes: 153 additions & 55 deletions Packs/CortexDataLake/Integrations/CortexDataLake/CortexDataLake.py
Original file line number Diff line number Diff line change
Expand Up @@ -670,6 +670,68 @@ def files_context_transformer(row_content: dict) -> dict:
}


def gp_context_transformer(row_content: dict) -> dict:
    """
    Map one raw row of the firewall.globalprotect table into its context-path keys.
    Documentation: https://docs.paloaltonetworks.com/strata-logging-service/log-reference/network-logs/network-globalprotect-log

    Args:
        row_content: a dict representing raw data of a row

    Returns:
        a dict with context paths and their corresponding value
    """
    get = row_content.get

    def sub(field: str, key: str):
        # Safely extract a sub-key from a nested dict field (missing field -> None).
        return get(field, {}).get(key)

    return {
        'AttemptedGateways': get('attempted_gateways'),
        'AuthMethod': get('auth_method'),
        'ConnectionErrorID': sub('connection_error', 'id'),
        'ConnectionErrorValue': sub('connection_error', 'value'),
        'CountOfRepeats': get('count_of_repeats'),
        'CustomerID': get('customer_id'),
        'EndpointDeviceName': get('endpoint_device_name'),
        'EndpointGPVersion': get('endpoint_gp_version'),
        'EndpointOSType': get('endpoint_os_type'),
        'EndpointOSVersion': get('endpoint_os_version'),
        'EventID': sub('event_id', 'value'),
        'Gateway': get('gateway'),
        'GatewayPriority': sub('gateway_priority', 'value'),
        'GatewaySelectionType': get('gateway_selection_type'),
        'HostID': get('host_id'),
        'IsDuplicateLog': get('is_dup_log'),
        'IsExported': get('is_exported'),
        'IsForwarded': get('is_forwarded'),
        'IsPrismaBranch': get('is_prisma_branch'),
        'IsPrismaMobile': get('is_prisma_mobile'),
        'LogSource': get('log_source'),
        'LogSourceID': get('log_source_id'),
        'LogSourceName': get('log_source_name'),
        # Epoch timestamps are rendered human-readable; 0 is the fallback epoch.
        'LogTime': human_readable_time_from_epoch_time(get('log_time', 0)),
        'LogType': sub('log_type', 'value'),
        'LoginDuration': get('login_duration'),
        'Opaque': get('opaque'),
        'PlatformType': get('platform_type'),
        'Portal': get('portal'),
        'PrivateIPv4': sub('private_ip', 'value'),
        'PrivateIPv6': sub('private_ipv6', 'value'),
        'ProjectName': get('project_name'),
        'PublicIPv4': sub('public_ip', 'value'),
        'PublicIPv6': sub('public_ipv6', 'value'),
        'QuarantineReason': get('quarantine_reason'),
        'SequenceNo': get('sequence_no'),
        'SourceRegion': get('source_region'),
        'SourceUser': get('source_user'),
        'SourceUserDomain': sub('source_user_info', 'domain'),
        'SourceUserName': sub('source_user_info', 'name'),
        'SSLResponseTime': get('ssl_response_time'),
        'Stage': get('stage'),
        'EventStatus': sub('status', 'value'),
        'Subtype': sub('sub_type', 'value'),
        'TimeGenerated': human_readable_time_from_epoch_time(get('time_generated', 0)),
        'TunnelType': get('tunnel'),
        'VendorName': get('vendor_name'),
    }


def records_to_human_readable_output(fields: str, table_name: str, results: list) -> str:
"""
This function gets all relevant data for the human readable output of a specific table.
Expand All @@ -690,15 +752,26 @@ def records_to_human_readable_output(fields: str, table_name: str, results: list
if fields == "*":
for result in results:
filtered_result = {
"Source Address": result.get("source_ip", {}).get("value"),
"Destination Address": result.get("dest_ip", {}).get("value"),
"Application": result.get("app"),
"Action": result.get("action", {}).get("value"),
"RuleMatched": result.get("rule_matched"),
"TimeGenerated": human_readable_time_from_epoch_time(result.get("time_generated")),
"FileID": result.get("file_id"),
"FileName": result.get("file_name"),
"FileType": result.get("file_type"),
'Source Address': result.get('source_ip', {}).get('value'),
'Public IP': result.get('public_ip', {}).get('value'),
'Public IPv6': result.get('public_ipv6', {}).get('value'),
'Destination Address': result.get('dest_ip', {}).get('value'),
'Private IP': result.get('private_ip', {}).get('value'),
'Private IPv6': result.get('private_ipv6', {}).get('value'),
'Application': result.get('app'),
'Action': result.get('action', {}).get('value'),
'Stage': result.get('stage'),
'Status': result.get('status', {}).get('value'),
'Connection Error': result.get('connection_error').get('value'),
'RuleMatched': result.get('rule_matched'),
'TimeGenerated': human_readable_time_from_epoch_time(result.get('time_generated')),
'FileID': result.get('file_id'),
'FileName': result.get('file_name'),
'FileType': result.get('file_type'),
'Source User': result.get('source_user'),
'Source Region': result.get('source_region'),
'Gateway': result.get('gateway'),
'Portal': result.get('portal'),
}
filtered_results.append(filtered_result)
else:
Expand Down Expand Up @@ -760,42 +833,51 @@ def build_where_clause(args: dict) -> str:
A string represents the where part of a SQL query
"""
args_dict = {
"source_ip": "source_ip.value",
"dest_ip": "dest_ip.value",
"rule_matched": "rule_matched",
"from_zone": "from_zone",
"to_zone": "to_zone",
"source_port": "source_port",
"dest_port": "dest_port",
"action": "action.value",
"file_sha_256": "file_sha_256",
"file_name": "file_name",
"app": "app",
"app_category": "app_category",
"dest_device_port": "dest_device_port",
"dest_edl": "dest_edl",
"dest_dynamic_address_group": "dest_dynamic_address_group",
"dest_location": "dest_location",
"dest_user": "dest_user",
"file_type": "file_type",
"is_server_to_client": "is_server_to_client",
"is_url_denied": "is_url_denied",
"log_type": "log_type",
"nat_dest": "nat_dest",
"nat_dest_port": "nat_dest_port",
"nat_source": "nat_source",
"nat_source_port": "nat_source_port",
"rule_matched_uuid": "rule_matched_uuid",
"severity": "severity",
"source_device_host": "source_device_host",
"source_edl": "source_edl",
"source_dynamic_address_group": "source_dynamic_address_group",
"source_location": "source_location",
"source_user": "source_user",
"sub_type": "sub_type.value",
"time_generated": "time_generated",
"url_category": "url_category",
"url_domain": "url_domain",
'source_ip': 'source_ip.value',
'dest_ip': 'dest_ip.value',
'rule_matched': 'rule_matched',
'from_zone': 'from_zone',
'to_zone': 'to_zone',
'source_port': 'source_port',
'dest_port': 'dest_port',
'action': 'action.value',
'file_sha_256': 'file_sha_256',
'file_name': 'file_name',
'app': 'app',
'app_category': 'app_category',
'dest_device_port': 'dest_device_port',
'dest_edl': 'dest_edl',
'dest_dynamic_address_group': 'dest_dynamic_address_group',
'dest_location': 'dest_location',
'dest_user': 'dest_user',
'file_type': 'file_type',
'is_server_to_client': 'is_server_to_client',
'is_url_denied': 'is_url_denied',
'log_type': 'log_type',
'nat_dest': 'nat_dest',
'nat_dest_port': 'nat_dest_port',
'nat_source': 'nat_source',
'nat_source_port': 'nat_source_port',
'rule_matched_uuid': 'rule_matched_uuid',
'severity': 'severity',
'source_device_host': 'source_device_host',
'source_edl': 'source_edl',
'source_dynamic_address_group': 'source_dynamic_address_group',
'source_location': 'source_location',
'source_user': 'source_user',
'sub_type': 'sub_type.value',
'time_generated': 'time_generated',
'url_category': 'url_category',
'url_domain': 'url_domain',
'event_name': 'event_id.value',
'gateway': 'gateway',
'private_ipv4': 'private_ip.value',
'private_ipv6': 'private_ipv6.value',
'public_ipv4': 'public_ip.value',
'public_ipv6': 'public_ipv6.value',
'event_status': 'status.value',
'portal': 'portal',
'stage': 'stage',
}
if args.get("ip") and (args.get("source_ip") or args.get("dest_ip")):
raise DemistoException('Error: "ip" argument cannot appear with either "source_ip" nor "dest_ip"')
Expand Down Expand Up @@ -1120,9 +1202,23 @@ def query_file_data_command(args: dict, client: Client) -> tuple[str, dict, list
return query_table_logs(args, client, query_table_name, context_transformer_function, table_context_path)


def query_table_logs(
args: dict, client: Client, table_name: str, context_transformer_function: Callable[[dict], dict], table_context_path: str
) -> tuple[str, dict, list[dict[str, Any]]]:
def query_gp_logs_command(args: dict, client: Client):
    """
    Command function for querying the firewall.globalprotect table.

    Delegates to the generic query_table_logs with the GlobalProtect-specific
    table name, row transformer, and context path.

    Returns: a Demisto's entry with all the parsed data
    """
    return query_table_logs(
        args,
        client,
        table_name='globalprotect',
        context_transformer_function=gp_context_transformer,
        table_context_path='CDL.Logging.GlobalProtect',
    )


def query_table_logs(args: dict,
client: Client,
table_name: str,
context_transformer_function: Callable[[dict], dict],
table_context_path: str) -> tuple[str, dict, list[dict[str, Any]]]:
"""
This function is a generic function that get's all the data needed for a specific table of Cortex and acts as a
regular command function
Expand Down Expand Up @@ -1265,13 +1361,15 @@ def main():
return_outputs(*query_url_logs_command(args, client))
elif command == "cdl-query-file-data":
return_outputs(*query_file_data_command(args, client))
elif command == "fetch-incidents":
first_fetch_timestamp = params.get("first_fetch_timestamp", "24 hours").strip()
fetch_severity = params.get("firewall_severity")
fetch_table = params.get("fetch_table")
fetch_fields = params.get("fetch_fields") or "*"
fetch_subtype = params.get("firewall_subtype")
fetch_limit = params.get("limit")
elif command == 'cdl-query-gp-logs':
return_outputs(*query_gp_logs_command(args, client))
elif command == 'fetch-incidents':
first_fetch_timestamp = params.get('first_fetch_timestamp', '24 hours').strip()
fetch_severity = params.get('firewall_severity')
fetch_table = params.get('fetch_table')
fetch_fields = params.get('fetch_fields') or '*'
fetch_subtype = params.get('firewall_subtype')
fetch_limit = params.get('limit')
last_run = demisto.getLastRun()
fetch_filter = params.get("filter_query", "")
next_run, incidents = fetch_incidents(
Expand Down
Loading
Loading