Skip to content
Open
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension

Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
68 changes: 54 additions & 14 deletions Packs/knowbe4Phisher/Integrations/knowbe4Phisher/knowbe4Phisher.py
Original file line number Diff line number Diff line change
Expand Up @@ -400,7 +400,7 @@ def test_module(client: Client) -> str:

try:
_, incidents = fetch_incidents(
client=client, last_run=demisto.getLastRun(), first_fetch_time=first_fetch_time, max_fetch=fetch_limit
client=client, last_run=demisto.getLastRun(), first_fetch_time=first_fetch_time, max_fetch=fetch_limit, look_back=0
)

if incidents:
Expand Down Expand Up @@ -634,7 +634,13 @@ def phisher_delete_tags_command(client: Client, args: dict) -> str:
return "The tags weren't deleted - check the ID"


def fetch_incidents(client: Client, last_run: dict, first_fetch_time: str, max_fetch: int) -> tuple[str, list]:
def fetch_incidents(
client: Client,
last_run: dict,
first_fetch_time: str,
max_fetch: int,
look_back: int = 0,
) -> tuple[dict, list]:
"""
fetch_incidents is being called from the fetch_incidents_command function.
it checks the last message fetch, checking number of new events, and getting all messages
Expand All @@ -646,19 +652,27 @@ def fetch_incidents(client: Client, last_run: dict, first_fetch_time: str, max_f

args:
client (Client): Phisher client
last_run (Dict): dict containing the time of the last fetched message
last_run (Dict): SDK lookback last-run object {time, limit, found_incident_ids}
first_fetch_time (String): the first fetch parameter from integration instance for the first fetch
max_fetch (Int): maximum number for each fetch
look_back (Int): minutes to look back to recover late-indexed messages (EIR-14074)

returns:
next_run: timestamp of the last message fetched so next fetch will know from where to start
next_run: updated last-run dict for the SDK lookback pattern
incidents: list of incidents to be written to XSOAR
"""
last_time = last_run.get("last_fetch", first_fetch_time)
query = f'" reported_at:{{{last_time} TO *}}"'
max_fetch = int(max_fetch)
limit = last_run.get("limit", max_fetch)
start_fetch_time, end_fetch_time = get_fetch_run_time_range(
last_run=last_run,
first_fetch=first_fetch_time,
look_back=look_back,
date_format=DATE_FORMAT,
)
query = f'" reported_at:{{{start_fetch_time} TO {end_fetch_time}}}"'
incidents = []
# create request
payload_init = FETCH_WITHOUT_EVENTS.format(max_fetch, query)
payload_init = FETCH_WITHOUT_EVENTS.format(limit, query)
payload = json.dumps({"query": payload_init, "variables": {}})
req = create_gql_request(payload)
# get all messages
Expand All @@ -667,19 +681,39 @@ def fetch_incidents(client: Client, last_run: dict, first_fetch_time: str, max_f
for message in messages:
events = message.get("events", {})
creation_time = get_created_time(events)
message["created at"] = arg_to_datetime(creation_time).isoformat() # type: ignore
message.pop("events")
# cursor field must stay in DATE_FORMAT for the SDK lookback helpers
message["created_at_cursor"] = creation_time if creation_time else start_fetch_time
message["created at"] = arg_to_datetime(creation_time).isoformat() if creation_time else start_fetch_time # type: ignore
message.pop("events", None)

messages_filtered = filter_incidents_by_duplicates_and_limit(
incidents_res=messages,
last_run=last_run,
fetch_limit=max_fetch,
id_field="id",
)

for message in messages_filtered:
incident = {
"dbotMirrorId": message.get("id"),
"name": message.get("subject"),
"occurred": message.get("created at"),
"rawJSON": json.dumps(message),
}

last_time = message["created at"]
incidents.append(incident)

next_run = last_time
next_run = update_last_run_object(
last_run=last_run,
incidents=messages_filtered,
fetch_limit=max_fetch,
start_fetch_time=start_fetch_time,
end_fetch_time=end_fetch_time,
look_back=look_back,
created_time_field="created_at_cursor",
id_field="id",
date_format=DATE_FORMAT,
increase_last_run_time=False,
)
return next_run, incidents


Expand All @@ -692,13 +726,19 @@ def fetch_incidents_command(client: Client) -> None:
"""
first_fetch_time = client.first_fetch_time
fetch_limit = arg_to_number(client.max_fetch)
look_back = arg_to_number(demisto.params().get("look_back")) or 0
last_run = demisto.getLastRun() or {}
# migrate legacy lastRun shape {"last_fetch": <iso>} to SDK lookback shape
if "time" not in last_run and last_run.get("last_fetch"):
last_run = {"time": last_run["last_fetch"], "found_incident_ids": {}}
next_run, incidents = fetch_incidents(
client=client,
last_run=demisto.getLastRun(),
last_run=last_run,
first_fetch_time=first_fetch_time,
max_fetch=fetch_limit, # type: ignore
look_back=look_back,
)
demisto.setLastRun({"last_fetch": next_run})
demisto.setLastRun(next_run)
demisto.incidents(incidents)


Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -39,12 +39,19 @@ configuration:
type: 0
required: false
section: Collect
- display: 'Advanced: Minutes to look back when fetching'
defaultvalue: '15'
name: look_back
type: 0
required: false
section: Collect
additionalinfo: How far back in minutes to look for messages on each fetch. Use this to recover PhishER messages that were reported earlier but indexed late. Duplicates within the overlap window are filtered automatically.
- display: Incident type
defaultvalue: PhishER
name: incidentType
type: 13
required: false
section: Collect
defaultvalue: PhishER
- display: Trust any certificate (not secure)
name: insecure
type: 8
Expand Down Expand Up @@ -173,7 +180,7 @@ script:
- description: Comma separated list of tags to add.
name: tags
required: true
description: Add tags to a given message. If you have existing PhishER actions that would trigger for the tag that you're adding, you'll need to manually run the actions.
description: Add tags to a given message. If you have existing PhishER actions that would trigger for the tag that you're adding, you'll need to manually run the actions.
name: phisher-tags-create
- arguments:
- description: Message ID.
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -4,4 +4,4 @@
In order to generate a new API Key in your KnowBE4 console navigate to **Account Settings** > **Account Integrations** > **API** > **PhishER API**

### Configuration params
**NOTE**: If using 6.0.2 or lower version, put your API Key in the **Password** field, leave the **apikey** field empty.
**NOTE**: If using 6.0.2 or lower version, put your API Key in the **Password** field, leave the **apikey** field empty.
Original file line number Diff line number Diff line change
Expand Up @@ -3,6 +3,7 @@
import knowbe4Phisher as phisher
import pytest
from CommonServerPython import CommandResults
from freezegun import freeze_time
from test_data.mock_tests import (
create_request_test,
events_example,
Expand Down Expand Up @@ -91,6 +92,209 @@ def test_fetch_incidents(mocker, last_run, first_fetch, max_fetch, expected, res
assert result == expected


# --- Lookback / EIR-14074 tests ---

MSG_A = {
"actionStatus": "RECEIVED",
"category": "UNKNOWN",
"comments": [],
"events": [
{"causer": "null", "createdAt": "2024-01-01T10:00:00Z", "eventType": "CREATED", "id": "evt-a1", "triggerer": "null"},
],
"from": "a@example.com",
"id": "msg-a",
"phishmlReport": None,
"pipelineStatus": "PROCESSED",
"severity": "UNKNOWN_SEVERITY",
"subject": "Message A",
"tags": [],
}

MSG_B = {
"actionStatus": "RECEIVED",
"category": "UNKNOWN",
"comments": [],
"events": [
{"causer": "null", "createdAt": "2024-01-01T10:05:00Z", "eventType": "CREATED", "id": "evt-b1", "triggerer": "null"},
],
"from": "b@example.com",
"id": "msg-b",
"phishmlReport": None,
"pipelineStatus": "PROCESSED",
"severity": "UNKNOWN_SEVERITY",
"subject": "Message B",
"tags": [],
}


def _gql_response(messages):
return {"data": {"phisherMessages": {"nodes": messages, "pagination": {"page": 1, "pages": 1, "per": 50, "totalCount": len(messages)}}}}


@freeze_time("2024-01-01T11:00:00Z")
def test_fetch_incidents_first_run(mocker):
    """
    Given:
        - No prior run (empty last_run), a first_fetch of 7 days, and two messages returned by the API

    When:
        - fetch_incidents is called

    Then:
        - Both incidents are emitted and next_run carries 'time' plus 'found_incident_ids' with both message ids
    """
    api_response = _gql_response([MSG_A, MSG_B])
    mocker.patch.object(client, "phisher_gql_request", return_value=api_response)

    next_run, fetched = phisher.fetch_incidents(client, {}, "7 days", 50)

    assert len(fetched) == 2
    for key in ("time", "found_incident_ids"):
        assert key in next_run
    for msg_id in ("msg-a", "msg-b"):
        assert msg_id in next_run["found_incident_ids"]


@freeze_time("2024-01-01T11:00:00Z")
def test_fetch_incidents_dedup_via_found_ids(mocker):
    """
    Given:
        - last_run already records msg-a in found_incident_ids; the API returns both msg-a and msg-b

    When:
        - fetch_incidents is called

    Then:
        - Only msg-b is emitted (msg-a is deduplicated) and found_incident_ids still includes msg-b
    """
    previous_run = {"time": "2024-01-01T10:00:00Z", "found_incident_ids": {"msg-a": 1704067200}}
    mocker.patch.object(client, "phisher_gql_request", return_value=_gql_response([MSG_A, MSG_B]))

    next_run, fetched = phisher.fetch_incidents(client, previous_run, "7 days", 50)

    assert [incident["dbotMirrorId"] for incident in fetched] == ["msg-b"]
    assert "msg-b" in next_run["found_incident_ids"]


@freeze_time("2024-01-01T11:00:00Z")
def test_fetch_incidents_late_arrival_recovered(mocker):
    """
    Given:
        - last_run time is T-30min (10:30Z) and look_back=60
        - The API returns a message created at 2024-01-01T10:05:00Z (T-55min, earlier than last_run["time"])
        - That message is absent from found_incident_ids

    When:
        - fetch_incidents is called with look_back=60

    Then:
        - The late-arriving message IS emitted, because lookback widened the start window to T-60min
    """
    previous_run = {"time": "2024-01-01T10:30:00Z", "found_incident_ids": {}}
    # Same payload shape as MSG_A, but created before the last_run cursor and indexed late.
    late_msg = {
        **MSG_A,
        "id": "msg-late",
        "from": "late@example.com",
        "subject": "Late message",
        "events": [
            {"causer": "null", "createdAt": "2024-01-01T10:05:00Z", "eventType": "CREATED", "id": "evt-late", "triggerer": "null"},
        ],
    }
    mocker.patch.object(client, "phisher_gql_request", return_value=_gql_response([late_msg]))

    next_run, fetched = phisher.fetch_incidents(client, previous_run, "7 days", 50, look_back=60)

    assert len(fetched) == 1
    assert fetched[0]["dbotMirrorId"] == "msg-late"


@freeze_time("2024-01-01T11:00:00Z")
def test_fetch_incidents_lookback_zero_no_overlap(mocker):
    """
    Given:
        - last_run time is 10:30Z and look_back=0
        - A spy captures the GQL payload passed to phisher_gql_request

    When:
        - fetch_incidents is called with look_back=0

    Then:
        - The GQL payload contains 'reported_at:{2024-01-01T10:30:00Z TO' (the window start is not widened)
    """
    previous_run = {"time": "2024-01-01T10:30:00Z", "found_incident_ids": {}}
    request_spy = mocker.patch.object(client, "phisher_gql_request", return_value=_gql_response([]))

    phisher.fetch_incidents(client, previous_run, "7 days", 50, look_back=0)

    sent_payload = request_spy.call_args[0][0]
    assert "reported_at:{2024-01-01T10:30:00Z TO" in sent_payload


@freeze_time("2024-01-01T11:00:00Z")
def test_fetch_incidents_max_fetch_truncates(mocker):
    """
    Given:
        - The API returns 5 messages while max_fetch=2

    When:
        - fetch_incidents is called

    Then:
        - Only 2 incidents are emitted
    """
    batch = []
    for i in range(5):
        event = {"causer": "null", "createdAt": f"2024-01-01T10:0{i}:00Z", "eventType": "CREATED", "id": f"evt-{i}", "triggerer": "null"}
        batch.append({**MSG_A, "id": f"msg-{i}", "subject": f"Msg {i}", "events": [event]})
    mocker.patch.object(client, "phisher_gql_request", return_value=_gql_response(batch))

    _next_run, fetched = phisher.fetch_incidents(client, {}, "7 days", 2)

    assert len(fetched) == 2


@freeze_time("2024-01-01T11:00:00Z")
def test_fetch_incidents_legacy_last_fetch_migration(mocker):
    """
    Given:
        - last_run originally had the legacy shape {"last_fetch": "2024-01-01T10:00:00Z"} (pre-lookback upgrade)
        - The API returns msg-a

    When:
        - fetch_incidents is called

    Then:
        - msg-a is emitted (the legacy time is honored rather than falling back to first_fetch)
        - next_run has the new dict shape with 'time' and 'found_incident_ids'
    """
    # The legacy shape is migrated inside fetch_incidents_command; fetch_incidents itself
    # receives the already-migrated dict, which is what we simulate here.
    migrated_run = {"time": "2024-01-01T10:00:00Z", "found_incident_ids": {}}
    mocker.patch.object(client, "phisher_gql_request", return_value=_gql_response([MSG_A]))

    next_run, fetched = phisher.fetch_incidents(client, migrated_run, "7 days", 50)

    assert len(fetched) == 1
    for key in ("time", "found_incident_ids"):
        assert key in next_run


@freeze_time("2024-01-01T11:00:00Z")
def test_fetch_incidents_query_uses_window(mocker):
    """
    Given:
        - last_run time is 10:30Z, look_back=0, and 'now' is frozen at 11:00Z

    When:
        - fetch_incidents is called

    Then:
        - The GQL payload uses the closed window 'reported_at:{10:30:00Z TO 11:00:00Z}', never the open-ended 'TO *'
    """
    previous_run = {"time": "2024-01-01T10:30:00Z", "found_incident_ids": {}}
    request_spy = mocker.patch.object(client, "phisher_gql_request", return_value=_gql_response([]))

    phisher.fetch_incidents(client, previous_run, "7 days", 50, look_back=0)

    sent_payload = request_spy.call_args[0][0]
    assert "reported_at:{2024-01-01T10:30:00Z TO 2024-01-01T11:00:00Z}" in sent_payload
    assert "TO *" not in sent_payload


def test_time_creation():
"""
Given:
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -181,7 +181,8 @@
"rawJSON": '{"actionStatus": "RECEIVED", "category": "UNKNOWN", "comments": [], "from": "ek@gmail.com", \
"id": "bac9cf67-fa8e-46d1-ad67-69513fc44b5b", "phishmlReport": "null", "pipelineStatus": "PROCESSED", "severity": \
"UNKNOWN_SEVERITY", "subject": "Fwd: We have received your IT request", "tags": [{"name": "KB4:SECURITY", "type": \
"STANDARD"}, {"name": "KB4:URGENCY", "type": "STANDARD"}], "created at": "2021-08-08T14:06:11+00:00"}',
"STANDARD"}, {"name": "KB4:URGENCY", "type": "STANDARD"}], "created_at_cursor": "2021-08-08T14:06:11Z", \
"created at": "2021-08-08T14:06:11+00:00"}',
}
]
),
Expand Down
6 changes: 6 additions & 0 deletions Packs/knowbe4Phisher/ReleaseNotes/1_0_20.md
Original file line number Diff line number Diff line change
@@ -0,0 +1,6 @@
#### Integrations

##### PhishER

- Fixed an issue where messages reported before the previous fetch window but indexed late were silently dropped (EIR-14074).
- Added the *look_back* parameter (minutes) to the integration configuration. Default is 15 minutes. Tenants observing larger PhishER indexing delays should increase this value.
Loading
Loading