Merge pull request #496 from GhostManager/hotfix/adjustments-for-logging
Hotfix: Adjustments for Activity Logging
chrismaddalena authored Aug 7, 2024
2 parents 0e5a2db + 9a5c6c7 commit 6bfb992
Showing 6 changed files with 68 additions and 35 deletions.
13 changes: 12 additions & 1 deletion CHANGELOG.md
@@ -4,7 +4,18 @@ All notable changes to this project will be documented in this file.
 The format is based on [Keep a Changelog](https://keepachangelog.com/en/1.0.0/),
 and this project adheres to [Semantic Versioning](https://semver.org/spec/v2.0.0.html).
 
-### [4.2.4] - 29 July 2024
+### [4.2.5] - 7 August 2024
+
+### Changed
+
+* Changed filtered activity logs to sort by the start date instead of relevancy rank
+
+### Fixed
+
+* Fixed activity logs not loading additional entries when scrolling to the bottom of the page
+* Fixed an issue that could cause an error when importing an activity log csv file with one or more individual cells with content exceeding 128KB
+
+### [4.2.4] - 29 July 2024
 
 ### Changed
4 changes: 2 additions & 2 deletions VERSION
@@ -1,2 +1,2 @@
-v4.2.4
-29 July 2024
+v4.2.5
+9 August 2024
4 changes: 2 additions & 2 deletions config/settings/base.py
@@ -11,9 +11,9 @@
 # 3rd Party Libraries
 import environ
 
-__version__ = "4.2.4"
+__version__ = "4.2.5"
 VERSION = __version__
-RELEASE_DATE = "29 July 2024"
+RELEASE_DATE = "9 August 2024"
 
 ROOT_DIR = Path(__file__).resolve(strict=True).parent.parent.parent
 APPS_DIR = ROOT_DIR / "ghostwriter"
68 changes: 40 additions & 28 deletions ghostwriter/oplog/consumers.py
@@ -38,6 +38,7 @@ class TsVectorConcat(Func):
     Unlike Django's built in Concat function, this does not convert each argument to text first, so
     it can be used with tsvectors.
     """
+
     template = "(%(expressions)s)"
     arg_joiner = " || "
     output_field = SearchVectorField()
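For context on the class this hunk touches: Django builds a Func subclass's SQL by joining each argument's SQL with arg_joiner and substituting the result into template, so TsVectorConcat emits PostgreSQL's tsvector || operator instead of casting each argument to text the way Concat does. A minimal usage sketch (the column names are hypothetical):

from django.contrib.postgres.search import SearchVector, SearchVectorField
from django.db.models import Func

class TsVectorConcat(Func):
    """Concatenate tsvector expressions with || rather than casting them to text."""

    template = "(%(expressions)s)"
    arg_joiner = " || "
    output_field = SearchVectorField()

# Hypothetical columns; this renders as roughly:
#   (to_tsvector('simple', "title") || to_tsvector('english', "notes"))
vector = TsVectorConcat(
    SearchVector("title", config="simple"),
    SearchVector("notes", config="english"),
)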
@@ -54,9 +55,7 @@ def create_oplog_entry(oplog_id, user):
 
     if verify_access(user, oplog.project):
         OplogEntry.objects.create(
-            oplog_id_id=oplog_id,
-            operator_name=user.username,
-            extra_fields=ExtraFieldSpec.initial_json(OplogEntry)
+            oplog_id_id=oplog_id, operator_name=user.username, extra_fields=ExtraFieldSpec.initial_json(OplogEntry)
         )
     else:
         logger.warning(
@@ -148,23 +147,28 @@ def get_log_entries(self, oplog_id: int, offset: int, user: User, filter: str |
             ]
 
             # Subquery to fetch tags
-            simple_vector_args.append(Subquery(
-                TaggedItem.objects.filter(
-                    content_type__app_label=OplogEntry._meta.app_label,
-                    content_type__model=OplogEntry._meta.model_name,
-                    object_id=OuterRef("pk"),
-                ).annotate(
-                    all_tags=Func(F("tag__name"), Value(" "), function="STRING_AGG")
-                ).values("all_tags")
-            ))
+            simple_vector_args.append(
+                Subquery(
+                    TaggedItem.objects.filter(
+                        content_type__app_label=OplogEntry._meta.app_label,
+                        content_type__model=OplogEntry._meta.model_name,
+                        object_id=OuterRef("pk"),
+                    )
+                    .annotate(all_tags=Func(F("tag__name"), Value(" "), function="STRING_AGG"))
+                    .values("all_tags")
+                )
+            )
 
             # JSON operations to fetch extra fields
             for spec in ExtraFieldSpec.objects.filter(target_model=OplogEntry._meta.label):
-                field = Cast(CombinedExpression(
-                    F("extra_fields"),
-                    "->>",
-                    Value(spec.internal_name),
-                ), TextField())
+                field = Cast(
+                    CombinedExpression(
+                        F("extra_fields"),
+                        "->>",
+                        Value(spec.internal_name),
+                    ),
+                    TextField(),
+                )
                 simple_vector_args.append(field)
                 if spec.type == "rich_text":
                     english_vector_args.append(field)
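The loop above builds one searchable text expression per extra-field spec. PostgreSQL's ->> operator pulls a top-level key out of the extra_fields JSON column as text (NULL when the key is absent), and the Cast makes the output type explicit. A sketch with a hypothetical field name:

from django.db.models import F, TextField, Value
from django.db.models.expressions import CombinedExpression
from django.db.models.functions import Cast

# For a spec whose internal_name is "source_ip" (hypothetical), the loop builds:
field = Cast(
    CombinedExpression(F("extra_fields"), "->>", Value("source_ip")),
    TextField(),
)
# PostgreSQL evaluates this as ("extra_fields" ->> 'source_ip')::text, which can
# then feed the simple vector (and the english vector for rich-text fields).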
@@ -181,15 +185,21 @@ def get_log_entries(self, oplog_id: int, offset: int, user: User, filter: str |
 
             def q_term(term):
                 term = "'" + term.replace("'", "''").replace("\\", "\\\\") + "':*"
-                return SearchQuery(term, config="english", search_type="raw") | SearchQuery(term, config="simple", search_type="raw")
+                return SearchQuery(term, config="english", search_type="raw") | SearchQuery(
+                    term, config="simple", search_type="raw"
+                )
 
             query = reduce(lambda a, b: a & b, (q_term(term) for term in filter.split()))
 
             # Run query
-            entries = entries.annotate(
-                search=vector,
-                rank=SearchRank(vector, query),
-            ).filter(search=query).order_by("-rank")
+            entries = (
+                entries.annotate(
+                    search=vector,
+                    rank=SearchRank(vector, query),
+                )
+                .filter(search=query)
+                .order_by("-start_date")
+            )
         else:
             entries = entries.order_by("-start_date")
         entries = entries[offset : offset + 100]
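Two behaviors of the search block are worth noting: every whitespace-separated term in the filter becomes a raw prefix tsquery matched under both the english and simple configs, and, per this hotfix, matching entries now come back newest-first by start_date instead of by SearchRank relevancy. A sketch of what a hypothetical two-word filter produces:

from functools import reduce

from django.contrib.postgres.search import SearchQuery

def q_term(term):
    # Escape quotes and backslashes, then add :* so "cobal" matches "cobalt"
    term = "'" + term.replace("'", "''").replace("\\", "\\\\") + "':*"
    return SearchQuery(term, config="english", search_type="raw") | SearchQuery(
        term, config="simple", search_type="raw"
    )

# For the hypothetical filter "cobalt strike", the reduced query is roughly:
#   ('cobalt':* in english OR simple) AND ('strike':* in english OR simple)
query = reduce(lambda a, b: a & b, (q_term(term) for term in "cobalt strike".split()))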
@@ -227,11 +237,13 @@ async def receive(self, text_data=None, bytes_data=None):
             offset = json_data["offset"]
             filter = json_data.get("filter", "")
             entries = await self.get_log_entries(oplog_id, offset, user, filter)
-            message = json.dumps({
-                "action": "sync",
-                "filter": filter,
-                "offset": offset,
-                "data": entries,
-            })
+            message = json.dumps(
+                {
+                    "action": "sync",
+                    "filter": filter,
+                    "offset": offset,
+                    "data": entries,
+                }
+            )
 
             await self.send(text_data=message)
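How this ties to the infinite-scroll fix in the changelog: receive() reads offset (and an optional filter) from the client's JSON, get_log_entries() slices entries[offset : offset + 100], and the consumer echoes the paging state back with the data so the page can append the correct batch. A sketch of the exchange; any client keys beyond offset and filter are assumed:

import json

# Client -> server: request the next page of a (possibly filtered) log
request = json.dumps({"offset": 100, "filter": "beacon"})

# Server -> client: the payload built by receive() above
response = json.dumps(
    {
        "action": "sync",
        "filter": "beacon",
        "offset": 100,
        "data": [],  # up to 100 serialized entries, entries[offset : offset + 100]
    }
)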
2 changes: 1 addition & 1 deletion ghostwriter/oplog/templates/oplog/oplog_import.html
@@ -23,7 +23,7 @@ <h2>Upload Operation Log Entries CSV</h2>
         <label for="oplog_id"></label>
         <select class="form-control" id="oplog_log" name="oplog_id" required {% if not logs %}disabled{% endif %}>
             {% if logs %}
-            <option value="0">-- Select a Target Log --</option>
+            <option value="">-- Select a Target Log --</option>
             {% for log in logs %}
             <option value="{{ log.id }}" {% if initial_log.id == log.id %}selected="selected"{% endif %}>{{ log.name }} (ID {{ log.id }})</option>
             {% endfor %}
12 changes: 11 additions & 1 deletion ghostwriter/oplog/views.py
@@ -258,9 +258,19 @@ def validate_log_selection(user, oplog_id):
 
 def import_data(request, oplog_id, new_entries, dry_run=False):
     """Import the data into a dataset for validation and import."""
+    logger.info("Importing log data for log ID %s", oplog_id)
     dataset = Dataset()
     oplog_entry_resource = OplogEntryResource()
-    imported_data = dataset.load(new_entries, format="csv")
+    try:
+        imported_data = dataset.load(new_entries, format="csv")
+    except csv.Error as exception:  # pragma: no cover
+        logger.error("An error occurred while loading the CSV file for log import: %s", exception)
+        messages.error(
+            request,
+            "Your log file could not be loaded. There may be cells that exceed the 128KB text size limit for CSVs.",
+            extra_tags="alert-error",
+        )
+        return None
 
     if "oplog_id" in imported_data.headers:
         del imported_data["oplog_id"]
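Background on the new except clause: Dataset.load(..., format="csv") parses with Python's csv module (Dataset here appears to be tablib's), and csv caps a single field at 131072 bytes (128KB) by default, so an oversized cell raises csv.Error with "field larger than field limit (131072)". Previously that propagated as an unhandled exception; now the view logs it and shows the friendlier message above. A quick illustration of the underlying limit:

import csv
import io

print(csv.field_size_limit())  # 131072 (128KB) by default

big_cell = "A" * (128 * 1024 + 1)
reader = csv.reader(io.StringIO("header\n" + big_cell + "\n"))
try:
    list(reader)
except csv.Error as exception:
    print(exception)  # field larger than field limit (131072)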
