Skip to content

Commit

Permalink
plgn-380 salesforce add deduping (#1993)
Browse files Browse the repository at this point in the history
  • Loading branch information
llaszuk-r7 authored and ablakley-r7 committed Sep 28, 2023
1 parent 886c799 commit 22d64f6
Show file tree
Hide file tree
Showing 3 changed files with 63 additions and 2 deletions.
2 changes: 1 addition & 1 deletion plugins/salesforce/help.md
Original file line number Diff line number Diff line change
Expand Up @@ -531,7 +531,7 @@ _This plugin does not contain any troubleshooting information._

# Version History

* 2.0.3 - Implemented token auto-refresh on expiration for continuous sessions
* 2.0.3 - Implemented token auto-refresh on expiration for continuous sessions | Task Monitor Users: add `remove_duplicates` flag to filter out duplicate events
* 2.0.2 - Task Monitor Users: query improvement | Handle exception related with grant type
* 2.0.1 - Add extra logs register
* 2.0.0 - Code refactor | Update plugin to be cloud enabled | Add new task Monitor Users
Expand Down
36 changes: 35 additions & 1 deletion plugins/salesforce/komand_salesforce/tasks/monitor_users/task.py
Original file line number Diff line number Diff line change
Expand Up @@ -7,6 +7,8 @@
from komand_salesforce.util.exceptions import ApiException
from komand_salesforce.util.helpers import clean, convert_to_camel_case

from ...util.event import UserEvent


class MonitorUsers(insightconnect_plugin_runtime.Task):
USER_LOGIN_QUERY = "SELECT LoginTime, UserId, LoginType, LoginUrl, SourceIp, Status, Application, Browser FROM LoginHistory WHERE LoginTime >= {start_timestamp} AND LoginTime < {end_timestamp}"
Expand All @@ -19,6 +21,7 @@ class MonitorUsers(insightconnect_plugin_runtime.Task):
NEXT_USER_COLLECTION_TIMESTAMP = "next_user_collection_timestamp"
NEXT_USER_LOGIN_COLLECTION_TIMESTAMP = "next_user_login_collection_timestamp"
LAST_USER_LOGIN_COLLECTION_TIMESTAMP = "last_user_login_collection_timestamp"
REMOVE_DUPLICATES = "remove_duplicates"

def __init__(self):
super(self.__class__, self).__init__(
Expand All @@ -38,6 +41,7 @@ def run(self, params={}, state={}): # noqa: C901
get_users = False
get_user_login_history = False

remove_duplicates = state.pop(self.REMOVE_DUPLICATES, True) # true as a default
users_next_page_id = state.get(self.USERS_NEXT_PAGE_ID)
state.pop(self.USERS_NEXT_PAGE_ID, None)
user_login_next_page_id = state.get(self.USER_LOGIN_NEXT_PAGE_ID)
Expand Down Expand Up @@ -111,6 +115,7 @@ def run(self, params={}, state={}): # noqa: C901
self.UPDATED_USERS_QUERY.format(user_ids=concatenated_ids), None
).get("records", [])

self.logger.info(f"{len(updated_users)} updated users added to output")
records.extend(self.add_data_type_field(updated_users, "User Update"))

if get_users:
Expand All @@ -119,6 +124,8 @@ def run(self, params={}, state={}): # noqa: C901
if users_next_page_id:
state[self.USERS_NEXT_PAGE_ID] = users_next_page_id
has_more_pages = True

self.logger.info(f"{len(response.get('records'))} users added to output")
records.extend(self.add_data_type_field(response.get("records", []), "User"))

if get_user_login_history:
Expand All @@ -133,13 +140,38 @@ def run(self, params={}, state={}): # noqa: C901
if user_login_next_page_id:
state[self.USER_LOGIN_NEXT_PAGE_ID] = user_login_next_page_id
has_more_pages = True

self.logger.info(f"{len(response.get('records'))} users login added to output")
records.extend(self.add_data_type_field(response.get("records", []), "User Login"))

if remove_duplicates is True:
records = self.remove_duplicates(records)

records = [record.__dict__ for record in records]

return convert_to_camel_case(clean(records)), state, has_more_pages, 200, None
except ApiException as error:
return [], state, False, error.status_code, error
except Exception as error:
return [], state, False, 500, PluginException(preset=PluginException.Preset.UNKNOWN, data=error)

def remove_duplicates(self, records: list) -> list:
"""
Remove duplicate entries from the provided list of records.
Args:
records (list): A list containing the records to be de-duplicated.
Returns:
list: A list containing only the unique records from the input list.
"""
unique_records = list(dict.fromkeys(records))
if len(records) != len(unique_records):
self.logger.info(
f"Removed {len(records) - len(unique_records)} duplicate from a total of {len(records)} duplicate records."
)
return unique_records

@staticmethod
def get_current_time() -> datetime:
return datetime.now(timezone.utc)
Expand All @@ -154,6 +186,8 @@ def convert_to_datetime(timestamp: str) -> datetime:

@staticmethod
def add_data_type_field(records: list, field_value: str) -> list:
    """
    Tag each record with a data type and wrap it in a UserEvent.

    Note: the pasted diff interleaved the removed pre-change lines
    (an early ``return records`` inside the loop) with the added ones;
    this is the coherent post-change implementation.

    Args:
        records (list): Raw record dicts returned by the Salesforce API.
        field_value (str): Value stored under the "dataType" key of each record.
    Returns:
        list: UserEvent instances built from the tagged records.
    """
    event_records = []
    for record in records:
        record["dataType"] = field_value
        # Wrap each dict in a (frozen, hashable) UserEvent so that
        # remove_duplicates can de-duplicate the combined output.
        event_records.append(UserEvent(**record))
    return event_records
27 changes: 27 additions & 0 deletions plugins/salesforce/komand_salesforce/util/event.py
Original file line number Diff line number Diff line change
@@ -0,0 +1,27 @@
from dataclasses import dataclass, fields

from typing import Optional


@dataclass(frozen=True, eq=True)
class UserEvent:
    """Immutable, hashable record of a Salesforce user event.

    Combines the fields of the three record shapes produced by the
    Monitor Users task (user, user update, user login); fields that do
    not apply to a given shape stay at their ``None`` default.
    Frozen + custom ``__hash__`` makes instances usable as dict keys,
    which is what ``dict.fromkeys``-based de-duplication relies on.
    """

    # Raw Salesforce response metadata (e.g. type/url); a mutable dict,
    # so it is deliberately excluded from __hash__ below.
    attributes: dict
    # Label applied by add_data_type_field, e.g. "User", "User Update", "User Login".
    dataType: str
    Id: Optional[str] = None
    FirstName: Optional[str] = None
    LastName: Optional[str] = None
    Email: Optional[str] = None
    Alias: Optional[str] = None
    IsActive: Optional[bool] = None
    LoginTime: Optional[str] = None
    UserId: Optional[str] = None
    LoginType: Optional[str] = None
    LoginUrl: Optional[str] = None
    SourceIp: Optional[str] = None
    Status: Optional[str] = None
    Application: Optional[str] = None
    Browser: Optional[str] = None

    def __hash__(self):
        # "attributes" is an unhashable dict, so it cannot participate in the
        # hash. __eq__ (dataclass-generated) still compares all fields; two
        # instances differing only in "attributes" share a hash but compare
        # unequal, which is a legal hash collision.
        exclude_fields = ["attributes"]
        return hash(tuple(getattr(self, field.name) for field in fields(self) if field.name not in exclude_fields))

0 comments on commit 22d64f6

Please sign in to comment.