diff --git a/cas-pipeline.conf b/cas-pipeline.conf
index 4b5b571..d23b3a0 100644
--- a/cas-pipeline.conf
+++ b/cas-pipeline.conf
@@ -5,7 +5,7 @@ input {
   file {
     # Use a generic path for the log file. Update this to your specific log file location.
-    path => "/var/log/cas/*_service_*_idp_audit.log"
+    path => "/var/log/cas/*_audit.log"
     start_position => "beginning"
     # The sincedb_path is set to /dev/null for testing. For production,
     # consider setting a relative path, e.g., "./.sincedb_cas-audit".

@@ -25,8 +25,8 @@ filter {
     # The first gsub removes ANSI escape codes, which are sometimes present in the logs.
     # The second gsub removes the leading header line for each audit trail record.
     gsub => [
-      "message", "\e\[(\d+;)*\d+m", "",
-      "message", "^\s*20\d{2}-\d{2}-\d{2} \d{2}:\d{2}:\\d{2},\\d{3} INFO.*Audit trail record BEGIN\n=============================================================\n", ""
+      "message", "(\e\[m)?\e\[(\d+;)*\d+m", "",
+      "message", "^\s*20\d{2}-\d{2}-\d{2} \d{2}:\d{2}:\d{2},\d{3} INFO.*Audit trail record BEGIN\n=============================================================\n", ""
     ]
   }

@@ -34,7 +34,7 @@ filter {
   # This pattern is designed to extract key fields from the audit trail body.
   grok {
     match => {
-      "message" => "=============================================================\nWHEN: %{TIMESTAMP_ISO8601:timestamp}\nWHO: %{DATA:subject}\nWHAT: %{GREEDYDATA:what}\nACTION: %{WORD:action}\nCLIENT_IP: %{IP:ip_address}\nSERVER_IP: %{IP:server_ip}"
+      "message" => "WHEN: %{TIMESTAMP_ISO8601:timestamp}\nWHO: %{DATA:subject}\nWHAT: %{GREEDYDATA:what}\nACTION: %{WORD:action}\nCLIENT_IP: %{DATA:ip_address}\nSERVER_IP: %{GREEDYDATA:server_ip}"
     }
   }

@@ -167,6 +167,17 @@ filter {
     }
   }

+  # Make a best-effort guess for the destination system / object value.
+  if [action] in [ "DELEGATED_CLIENT_SUCCESS", "SERVICE_TICKET_CREATED", "SERVICE_TICKET_VALIDATE_SUCCESS", "OAUTH2_USER_PROFILE_CREATED", "OAUTH2_ACCESS_TOKEN_REQUEST_CREATED", "SAML2_RESPONSE_CREATED" ] {
+    # Extract the service URI and use it for both the object and destination_system.
+    grok {
+      # The service URI sits at the end of the line, so the pattern accepts
+      # either a trailing comma or the final curly bracket as its terminator.
+      match => { "what" => "service=%{URI:destination_system}(?:,|})" }
+      add_field => { "object" => "%{destination_system}" }
+    }
+  }
+
   # The geoip filter enriches the event with geographical information based on the IP address.
   if [ip_address] {
     geoip {
@@ -174,7 +185,7 @@
       target => "geoip"
       # Use a generic path for the GeoLite2 database. Logstash usually looks in its own 'data' directory.
       # Replace this with the actual path to your database file if the generic path fails.
-      database => "GeoLite2-Country.mmdb"
+      #database => "GeoLite2-Country.mmdb"
       fields => ["country_name", "country_code2"]
     }
   }
@@ -236,9 +247,9 @@ filter {
         acct["context"] = event.get("context")
       end

-      if event.get("[geoip][country_name]") && event.get("[geoip][country_code2]")
-        acct["geoip_country"] = event.get("[geoip][country_name]")
-        acct["geoip_country_code"] = event.get("[geoip][country_code2]")
+      if event.get("[geoip][geo][country_name]") && event.get("[geoip][geo][country_iso_code]")
+        acct["geoip_country"] = event.get("[geoip][geo][country_name]")
+        acct["geoip_country_code"] = event.get("[geoip][geo][country_iso_code]")
       end

      event.set("accounting", acct)
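
Reviewer note: the record below is a hypothetical audit entry, shaped purely from the grok pattern in this diff (values are illustrative, not taken from real logs). After the gsub step strips the "Audit trail record BEGIN" header, the remaining message should look roughly like this for the match to succeed:

    WHEN: 2024-03-15T10:21:07Z
    WHO: jsmith
    WHAT: {service=https://app.example.org/login, result=GRANTED}
    ACTION: SERVICE_TICKET_CREATED
    CLIENT_IP: 203.0.113.24
    SERVER_IP: 198.51.100.7

Switching CLIENT_IP from %{IP} to %{DATA} presumably keeps the match from failing on non-IP values; SERVER_IP uses %{GREEDYDATA} rather than %{DATA} because a lazy %{DATA} at the very end of a pattern matches the empty string and would leave server_ip blank.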
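The new service=%{URI:destination_system}(?:,|}) extraction assumes the WHAT payload embeds a "service=<uri>" pair terminated by a comma or by the payload's closing brace. Two hypothetical payloads (shape inferred from the pattern itself, not from real CAS output) that would both capture https://app.example.org/login:

    WHAT: {service=https://app.example.org/login, principal=jsmith}
    WHAT: {principal=jsmith, service=https://app.example.org/login}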
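Whether geoip emits the nested [geoip][geo][...] layout that the updated ruby block reads depends on the plugin's ECS compatibility mode; on a legacy-mode setup those lookups quietly return nil. A defensive sketch of the same accounting lines that falls back to the flat field names (both sets of names are taken from this diff, nothing else is assumed):

      # Prefer the ECS layout, fall back to the legacy flat geoip fields.
      country      = event.get("[geoip][geo][country_name]")     || event.get("[geoip][country_name]")
      country_code = event.get("[geoip][geo][country_iso_code]") || event.get("[geoip][country_code2]")
      if country && country_code
        acct["geoip_country"]      = country
        acct["geoip_country_code"] = country_code
      end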