This repository has been archived by the owner on Nov 30, 2022. It is now read-only.

Create finished audit log for privacy requests #1040

Merged · 13 commits · Aug 9, 2022
CHANGELOG.md (1 addition, 0 deletions)
@@ -34,6 +34,7 @@ The types of changes are:
* Access and erasure support for Auth0 [#991](https://github.com/ethyca/fidesops/pull/991)
* Start better understanding how request execution fails [#993](https://github.com/ethyca/fidesops/pull/993)
* Add approval `AuditLog`s for user and system approved privacy requests [#1038](https://github.com/ethyca/fidesops/pull/1038)
* Add finished `AuditLog` for subject requests [#1040](https://github.com/ethyca/fidesops/pull/1040)

### Changed

requirements.txt (1 addition, 1 deletion)
@@ -10,7 +10,7 @@ fastapi-caching[redis]
fastapi-pagination[sqlalchemy]~= 0.9.3
fastapi[all]==0.79.0
fideslang==1.0.0
fideslib==3.0.0
fideslib==3.0.2
fideslog==1.2.2
hvac==0.11.2
multidimensional_urlencode==0.0.4
New file (Alembic migration)
@@ -0,0 +1,32 @@
"""Add finished audit log

Revision ID: bab75915670a
Revises: 3c5e1253465d
Create Date: 2022-08-04 20:18:29.339116

"""
import sqlalchemy as sa
from alembic import op

# revision identifiers, used by Alembic.
revision = "bab75915670a"
down_revision = "3c5e1253465d"
branch_labels = None
depends_on = None


def upgrade():
    # Add the new "finished" value to the existing auditlogaction enum.
    op.execute("alter type auditlogaction add value 'finished'")


def downgrade():
    # Postgres cannot drop a value from an enum type directly, so remove the
    # rows that use it, recreate the type without "finished", and swap it in.
    op.execute("delete from auditlog where action in ('finished')")
    op.execute("alter type auditlogaction rename to auditlogaction_old")
    op.execute("create type auditlogaction as enum('approved', 'denied')")
    op.execute(
        (
            "alter table auditlog alter column action type auditlogaction using "
            "action::text::auditlogaction"
        )
    )
    op.execute("drop type auditlogaction_old")
src/fidesops/service/privacy_request/request_runner_service.py (10 additions, 0 deletions)
@@ -5,6 +5,7 @@
from celery import Task
from celery.utils.log import get_task_logger
from fideslib.db.session import get_db_session
from fideslib.models.audit_log import AuditLog, AuditLogAction
from pydantic import ValidationError
from redis.exceptions import DataError
from sqlalchemy.orm import Session
@@ -281,6 +282,15 @@ def run_privacy_request(
return

privacy_request.finished_processing_at = datetime.utcnow()
AuditLog.create(
db=session,
data={
"user_id": "system",
"privacy_request_id": privacy_request.id,
"action": AuditLogAction.finished,
"message": "",
},
)
Contributor:
Something that would be nice here is to wrap this new AuditLog creation into a transaction with privacy_request.save(...) — if one of these operations fails without the other we'd end up with an incomplete state, so it's better to commit them both at once.

Contributor (Author):
Good find. That is a potential issue where the approved/denied logs are created as well. All of the audit log creations should probably be updated at the same time when this is done.

Would you like this to be addressed in this PR or a follow-up?

Contributor:
Let's do it as a follow-up
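
A minimal sketch of the single-transaction follow-up discussed above, assuming a plain SQLAlchemy Session and that the model instances can be staged on it directly. The helper name, the PrivacyRequestStatus import path, and the direct AuditLog constructor are illustrative assumptions; the fideslib create()/save() helpers may commit internally on their own, so this is not the project's actual implementation.

from datetime import datetime

from fideslib.models.audit_log import AuditLog, AuditLogAction
from sqlalchemy.orm import Session

from fidesops.models.privacy_request import PrivacyRequestStatus  # assumed import path


def mark_request_finished(session: Session, privacy_request) -> None:
    """Stage the status update and the 'finished' audit log, then commit once."""
    try:
        privacy_request.finished_processing_at = datetime.utcnow()
        privacy_request.status = PrivacyRequestStatus.complete
        session.add(privacy_request)
        session.add(
            AuditLog(  # constructed directly here for illustration only
                user_id="system",
                privacy_request_id=privacy_request.id,
                action=AuditLogAction.finished,
                message="",
            )
        )
        session.commit()  # either both rows persist or neither does
    except Exception:
        session.rollback()
        raise

The same single-commit pattern could later be applied where the approved/denied audit logs are created, as noted above.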

privacy_request.status = PrivacyRequestStatus.complete
privacy_request.save(db=session)
logging.info(f"Privacy request {privacy_request.id} run completed.")
tests/ops/integration_tests/test_external_database_connections.py (47 additions, 41 deletions)
@@ -71,58 +71,64 @@ def snowflake_test_engine() -> Generator:
def test_redshift_example_data(redshift_test_engine):
"""Confirm that we can connect to the redshift test db and get table names"""
inspector = inspect(redshift_test_engine)
assert inspector.get_table_names(schema="test") == [
"report",
"service_request",
"login",
"visit",
"order_item",
"order",
"payment_card",
"employee",
"customer",
"address",
"product",
]
assert sorted(inspector.get_table_names(schema="test")) == sorted(
[
"report",
"service_request",
"login",
"visit",
"order_item",
"order",
"payment_card",
"employee",
"customer",
"address",
"product",
]
)


@pytest.mark.integration_external
@pytest.mark.integration_snowflake
def test_snowflake_example_data(snowflake_test_engine):
"""Confirm that we can connect to the snowflake test db and get table names"""
inspector = inspect(snowflake_test_engine)
assert inspector.get_table_names(schema="test") == [
"cc",
"report",
"address",
"customer",
"employee",
"login",
"order",
"order_item",
"payment_card",
"product",
"report",
"service_request",
"visit",
]
assert sorted(inspector.get_table_names(schema="test")) == sorted(
[
"cc",
"report",
"address",
"customer",
"employee",
"login",
"order",
"order_item",
"payment_card",
"product",
"report",
"service_request",
"visit",
]
)


@pytest.mark.integration_external
@pytest.mark.integration_bigquery
def test_bigquery_example_data(bigquery_test_engine):
"""Confirm that we can connect to the bigquery test db and get table names"""
inspector = inspect(bigquery_test_engine)
assert inspector.get_table_names(schema="fidesopstest") == [
"address",
"customer",
"employee",
"login",
"order_item",
"orders",
"payment_card",
"product",
"report",
"service_request",
"visit",
]
assert sorted(inspector.get_table_names(schema="fidesopstest")) == sorted(
[
"address",
"customer",
"employee",
"login",
"order_item",
"orders",
"payment_card",
"product",
"report",
"service_request",
"visit",
]
)
tests/ops/models/test_client.py (4 additions, 4 deletions)
@@ -17,8 +17,8 @@ def test_create_client_and_secret(self, db: Session) -> None:
assert new_client.hashed_secret is not None
assert (
hash_with_salt(
secret.encode(config.security.ENCODING),
new_client.salt.encode(config.security.ENCODING),
secret.encode(config.security.encoding),
new_client.salt.encode(config.security.encoding),
)
== new_client.hashed_secret
)
@@ -32,8 +32,8 @@ def test_get_client(self, db: Session, oauth_client) -> None:
)

hashed_access_key = hash_with_salt(
config.security.oauth_root_client_secret.encode(config.security.ENCODING),
client.salt.encode(config.security.ENCODING),
config.security.oauth_root_client_secret.encode(config.security.encoding),
client.salt.encode(config.security.encoding),
)

assert "fidesopsadmin" == client.id
tests/ops/service/privacy_request/request_runner_service_test.py (12 additions, 0 deletions)
@@ -6,6 +6,7 @@

import pydash
import pytest
from fideslib.models.audit_log import AuditLog, AuditLogAction
from pydantic import ValidationError
from sqlalchemy import column, select, table
from sqlalchemy.orm import Session
@@ -288,6 +289,17 @@ def test_create_and_process_access_request(
assert results[visit_key][0]["email"] == customer_email
log_id = pr.execution_logs[0].id
pr_id = pr.id

finished_audit_log: AuditLog = AuditLog.filter(
db=db,
conditions=(
(AuditLog.privacy_request_id == pr_id)
& (AuditLog.action == AuditLogAction.finished)
),
).first()

assert finished_audit_log is not None

# Both pre-execution webhooks and both post-execution webhooks were called
assert trigger_webhook_mock.call_count == 4

tests/ops/service/storage_uploader_service_test.py (7 additions, 7 deletions)
@@ -386,9 +386,9 @@ def test_encrypted_json(self, data, privacy_request_with_encryption_keys):
)
assert isinstance(buff, BytesIO)
encrypted = buff.read()
data = encrypted.decode(config.security.ENCODING)
data = encrypted.decode(config.security.encoding)
decrypted = decrypt_combined_nonce_and_message(
data, self.key.encode(config.security.ENCODING)
data, self.key.encode(config.security.encoding)
)
assert json.loads(decrypted) == original_data

@@ -401,14 +401,14 @@ def test_encrypted_csv(self, data, privacy_request_with_encryption_keys):
zipfile = ZipFile(buff)

with zipfile.open("mongo:address.csv", "r") as address_csv:
data = address_csv.read().decode(config.security.ENCODING)
data = address_csv.read().decode(config.security.encoding)

decrypted = decrypt_combined_nonce_and_message(
data, self.key.encode(config.security.ENCODING)
data, self.key.encode(config.security.encoding)
)

binary_stream = BytesIO(decrypted.encode(config.security.ENCODING))
df = pd.read_csv(binary_stream, encoding=config.security.ENCODING)
binary_stream = BytesIO(decrypted.encode(config.security.encoding))
df = pd.read_csv(binary_stream, encoding=config.security.encoding)
assert list(df.columns) == [
"id",
"zip",
@@ -440,7 +440,7 @@ def test_key_and_nonce_set(self, privacy_request):
data = "test data"
ret = encrypt_access_request_results(data, request_id=privacy_request.id)
decrypted = decrypt_combined_nonce_and_message(
ret, key.encode(config.security.ENCODING)
ret, key.encode(config.security.encoding)
)
assert data == decrypted
