27 changes: 19 additions & 8 deletions cas-pipeline.conf
@@ -5,7 +5,7 @@
input {
file {
# Use a generic path for the log file. Update this to your specific log file location.
path => "/var/log/cas/*_service_*_idp_audit.log"
path => "/var/log/cas/*_audit.log"
start_position => "beginning"
# The sincedb_path is set to /dev/null for testing. For production,
# consider setting a relative path, e.g., "./.sincedb_cas-audit".
@@ -25,16 +25,16 @@ filter {
# The first gsub removes ANSI escape codes, which are sometimes present in the logs.
# The second gsub removes the leading header line for each audit trail record.
gsub => [
"message", "\e\[(\d+;)*\d+m", "",
"message", "^\s*20\d{2}-\d{2}-\d{2} \d{2}:\d{2}:\\d{2},\\d{3} INFO.*Audit trail record BEGIN\n=============================================================\n", ""
"message", "(\e\[m)?\e\[(\d+;)*\d+m", "",
"message", "^\s*20\d{2}-\d{2}-\d{2} \d{2}:\d{2}:\d{2},\d{3} INFO.*Audit trail record BEGIN\n=============================================================\n", ""
]
}
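# Illustrative only (an assumed raw-log shape, inferred from the gsub pattern above):
# each record is expected to open with a header roughly like
#   2024-05-02 10:15:30,123 INFO ... Audit trail record BEGIN
#   =============================================================
# which the second gsub strips so that only the WHEN/WHO/WHAT body reaches the grok below.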

# The grok filter is the main workhorse for parsing unstructured log data.
# This pattern is designed to extract key fields from the audit trail body.
grok {
match => {
"message" => "=============================================================\nWHEN: %{TIMESTAMP_ISO8601:timestamp}\nWHO: %{DATA:subject}\nWHAT: %{GREEDYDATA:what}\nACTION: %{WORD:action}\nCLIENT_IP: %{IP:ip_address}\nSERVER_IP: %{IP:server_ip}"
"message" => "WHEN: %{TIMESTAMP_ISO8601:timestamp}\nWHO: %{DATA:subject}\nWHAT: %{GREEDYDATA:what}\nACTION: %{WORD:action}\nCLIENT_IP: %{DATA:ip_address}\nSERVER_IP: %{DATA:server_ip}"
}
}
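# Illustrative only: the grok pattern above assumes a cleaned-up message shaped roughly
# like the following (all values are made up):
#   WHEN: 2024-05-02T10:15:30Z
#   WHO: jdoe
#   WHAT: {service=https://app.example.org/login}
#   ACTION: SERVICE_TICKET_CREATED
#   CLIENT_IP: 203.0.113.10
#   SERVER_IP: 192.0.2.5
# which yields the fields timestamp, subject, what, action, ip_address and server_ip.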

@@ -167,14 +167,25 @@ filter {
}
}

# Make a best-effort guess at the destination system / object value
if [action] in [ "DELEGATED_CLIENT_SUCCESS", "SERVICE_TICKET_CREATED", "SERVICE_TICKET_VALIDATE_SUCCESS", "OAUTH2_USER_PROFILE_CREATED", "OAUTH2_ACCESS_TOKEN_REQUEST_CREATED", "SAML2_RESPONSE_CREATED" ] {
# Extract the service URI and use it for both the object and destination_system
grok {
# Extract the service URI at the end of the line,
# terminated by either a trailing comma or the closing curly bracket.
match => { "what" => "service=%{URI:destination_system}(?:,|})" }
add_field => { "object" => "%{destination_system}" }
}
}
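# Illustrative only: if the "what" field contains, for example,
#   {service=https://app.example.org/callback}
# the grok above sets both destination_system and object to that URL. The URL is a
# made-up placeholder, not taken from real logs.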

# The geoip filter enriches the event with geographical information based on the IP address.
if [ip_address] {
geoip {
source => "ip_address"
target => "geoip"
# Use a generic path for the GeoLite2 database. Logstash usually looks in its own 'data' directory.
# Replace this with the actual path to your database file if the generic path fails.
database => "GeoLite2-Country.mmdb"
#database => "GeoLite2-Country.mmdb"
fields => ["country_name", "country_code2"]
}
}
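# Note (assumption): with ECS compatibility enabled, the default on recent Logstash
# releases, the geoip filter nests its output, so the country data lands under
# [geoip][geo][country_name] and [geoip][geo][country_iso_code] - the paths the ruby
# filter below reads.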
@@ -236,9 +247,9 @@ filter {
acct["context"] = event.get("context")
end

if event.get("[geoip][country_name]") && event.get("[geoip][country_code2]")
acct["geoip_country"] = event.get("[geoip][country_name]")
acct["geoip_country_code"] = event.get("[geoip][country_code2]")
if event.get("[geoip][geo][country_name]") && event.get("[geoip][geo][country_iso_code]")
acct["geoip_country"] = event.get("[geoip][geo][country_name]")
acct["geoip_country_code"] = event.get("[geoip][geo][country_iso_code]")
end

event.set("accounting", acct)