Skip to content
Closed
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension


Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
5 changes: 5 additions & 0 deletions hadoop-tools/hadoop-aws/dev-support/findbugs-exclude.xml
Original file line number Diff line number Diff line change
Expand Up @@ -86,4 +86,9 @@
<Method name="submit"/>
<Bug pattern="RV_RETURN_VALUE_IGNORED_BAD_PRACTICE"/>
</Match>

<Match>
<Class name="org.apache.hadoop.fs.s3a.audit.AvroS3LogEntryRecord"/>
<Bug pattern="NP_NULL_INSTANCEOF"/>
</Match>
</FindBugsFilter>
18 changes: 18 additions & 0 deletions hadoop-tools/hadoop-aws/pom.xml
Original file line number Diff line number Diff line change
Expand Up @@ -458,6 +458,7 @@
<exclusion>org.apache.hadoop.fs.s3a.commit.impl.*</exclusion>
<exclusion>org.apache.hadoop.fs.s3a.commit.magic.*</exclusion>
<exclusion>org.apache.hadoop.fs.s3a.commit.staging.*</exclusion>
<exclusion>org.apache.hadoop.fs.s3a.audit.mapreduce.*</exclusion>
</exclusions>
<bannedImports>
<bannedImport>org.apache.hadoop.mapreduce.**</bannedImport>
Expand All @@ -469,6 +470,23 @@
</execution>
</executions>
</plugin>
<plugin>
<groupId>org.apache.avro</groupId>
<artifactId>avro-maven-plugin</artifactId>
<executions>
<execution>
<id>generate-avro-sources</id>
<phase>generate-sources</phase>
<goals>
<goal>schema</goal>
</goals>
</execution>
</executions>
<configuration>
<sourceDirectory>src/main/java/org/apache/hadoop/fs/s3a/audit/avro</sourceDirectory>
<outputDirectory>${project.build.directory}/generated-sources/avro</outputDirectory>
</configuration>
</plugin>
</plugins>
</build>

Expand Down
Original file line number Diff line number Diff line change
@@ -0,0 +1,247 @@
/*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/

package org.apache.hadoop.fs.s3a.audit;

import java.io.Closeable;
import java.io.IOException;
import java.io.PrintWriter;
import java.net.URI;
import java.util.Arrays;
import java.util.List;

import org.slf4j.Logger;
import org.slf4j.LoggerFactory;

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.conf.Configured;
import org.apache.hadoop.fs.FileStatus;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.fs.s3a.audit.mapreduce.S3AAuditLogMergerAndParser;
import org.apache.hadoop.util.ExitUtil;
import org.apache.hadoop.util.Tool;
import org.apache.hadoop.util.ToolRunner;

import static org.apache.hadoop.service.launcher.LauncherExitCodes.EXIT_COMMAND_ARGUMENT_ERROR;
import static org.apache.hadoop.service.launcher.LauncherExitCodes.EXIT_FAIL;
import static org.apache.hadoop.service.launcher.LauncherExitCodes.EXIT_SUCCESS;

/**
 * AuditTool is a command-line interface.
 * Its function is to merge and parse the S3A audit log files
 * and generate an Avro file from them.
 */
public class AuditTool extends Configured implements Tool, Closeable {
Copy link
Contributor

Choose a reason for hiding this comment

The reason will be displayed to describe this comment to others. Learn more.

what about makign this something the hadoop s3guard can invoke?

Copy link
Contributor Author

Choose a reason for hiding this comment

The reason will be displayed to describe this comment to others. Learn more.

I think initially we went with that, but then changed it to be an alone audit log tool, not quite sure why we didn't go that route. Were there any plans to remove s3guard tool in the future, since we would have to separate it out then.


private static final Logger LOG = LoggerFactory.getLogger(AuditTool.class);

// Worker that performs the merge of the audit log files and the
// parse/convert-to-Avro steps on behalf of this CLI.
private final S3AAuditLogMergerAndParser s3AAuditLogMergerAndParser =
    new S3AAuditLogMergerAndParser();

/**
 * Name of this tool: {@value}.
 */
public static final String AUDIT_TOOL =
    "org.apache.hadoop.fs.s3a.audit.AuditTool";

/**
 * Purpose of this tool: {@value}.
 */
public static final String PURPOSE =
    "\n\nUSAGE:\nMerge, parse audit log files and convert into avro file "
        + "for "
        + "better "
        + "visualization";

// Exit codes
private static final int SUCCESS = EXIT_SUCCESS;
private static final int FAILURE = EXIT_FAIL;
private static final int INVALID_ARGUMENT = EXIT_COMMAND_ARGUMENT_ERROR;

// Command line shape: destination directory first, log directory second.
private static final String USAGE =
    "hadoop " + AUDIT_TOOL +
        " s3a://<bucket_name>/<destination_dir_path>/" +
        " s3a://<bucket_name>/<audit_logs_dir_path>/" + "\n";

// Optional output writer; when null, System.out is flushed instead.
private PrintWriter out;

// Default constructor; configuration is injected later via setConf().
public AuditTool() {
  super();
}

/**
 * Get the full usage text for this tool.
 *
 * @return the usage string followed by the tool's purpose description
 */
public String getUsage() {
  return USAGE.concat(PURPOSE);
}

/**
 * Get the name of this tool.
 *
 * @return the fully qualified classname used to invoke the tool
 */
public String getName() {
  return AUDIT_TOOL;
}

/**
 * This run method in AuditTool takes source and destination path of bucket,
 * verifies that both are directories, and passes these paths on to merge
 * and parse the audit log files.
 *
 * @param args argument list: destination directory, then log directory
 * @return SUCCESS i.e, '0', which is an exit code
 * @throws Exception on any failure.
 */
@Override
public int run(String[] args) throws Exception {
  preConditionArgsSizeCheck(args);
  List<String> paths = Arrays.asList(args);

  // Path of audit log files
  Path logsPath = new Path(paths.get(1));
  // Path of destination directory
  Path destPath = new Path(paths.get(0));

  // Bind to the filesystem hosting the logs.
  // Use the configuration injected by ToolRunner via setConf() so that
  // any -D options passed on the command line are honored; fall back to
  // a fresh Configuration only if none was set.
  Configuration conf = getConf();
  if (conf == null) {
    conf = new Configuration();
  }
  // Path.toUri() is used instead of new URI(path.toString()), which could
  // throw URISyntaxException and needlessly re-parses the path.
  FileSystem fileSystem = FileSystem.get(logsPath.toUri(), conf);

  // Both arguments must resolve to directories.
  requireDirectory(fileSystem, logsPath);
  requireDirectory(fileSystem, destPath);

  // Calls S3AAuditLogMergerAndParser for implementing merging, passing of
  // audit log files and converting into avro file
  boolean mergeAndParseResult =
      s3AAuditLogMergerAndParser.mergeAndParseAuditLogFiles(
          fileSystem, logsPath, destPath);
  if (!mergeAndParseResult) {
    return FAILURE;
  }

  return SUCCESS;
}

/**
 * Verify that a path resolves to a directory; report and raise an
 * invalid-argument error when it is a file.
 *
 * @param fileSystem filesystem to probe
 * @param path path to check
 * @throws IOException if the status probe fails (including file not found)
 */
private void requireDirectory(FileSystem fileSystem, Path path)
    throws IOException {
  FileStatus status = fileSystem.getFileStatus(path);
  if (status.isFile()) {
    String message = "Expecting a directory, but " + path.getName() + " is a"
        + " file which was passed as an argument";
    errorln(message);
    throw invalidArgs(message);
  }
}

/**
 * Validate the argument count before running: exactly two paths
 * are required.
 *
 * @param args the raw command line arguments
 */
private void preConditionArgsSizeCheck(String[] args) {
  if (args.length == 2) {
    return;
  }
  errorln(getUsage());
  throw invalidArgs("Invalid number of arguments, please specify audit "
      + "log files directory as 1st argument and destination directory "
      + "as 2nd argument");
}

/**
 * Print a line of text to {@code System.err}.
 *
 * @param x text to print
 */
protected static void errorln(String x) {
  System.err.println(x);
}

/**
 * Build the exception used to report invalid command line arguments;
 * the exit code is always {@link #INVALID_ARGUMENT}.
 *
 * @param format string format
 * @param args optional arguments for the string
 * @return a new exception to throw
 */
protected static ExitUtil.ExitException invalidArgs(
    String format, Object... args) {
  return exitException(INVALID_ARGUMENT, format, args);
}

/**
 * Build an exception to throw with a formatted message.
 *
 * @param exitCode exit code to use
 * @param format string format
 * @param args optional arguments for the string
 * @return a new exception to throw
 */
protected static ExitUtil.ExitException exitException(
    final int exitCode,
    final String format,
    final Object... args) {
  return new ExitUtil.ExitException(exitCode,
      String.format(format, args));
}

/**
 * Flush all active output channels, including {@code System.err},
 * so as to stay in sync with any JRE log messages.
 */
private void flush() {
  if (out != null) {
    out.flush();
  } else {
    System.out.flush();
  }
  System.err.flush();
}

/**
 * Close the tool: flush all streams and, when an output writer was
 * opened, close it.
 *
 * @throws IOException on a failure to close
 */
@Override
public void close() throws IOException {
  flush();
  PrintWriter writer = out;
  if (writer != null) {
    writer.close();
  }
}

/**
 * Inner entry point, with no logging or system exits.
 *
 * @param conf configuration
 * @param argv argument list
 * @return the exit code returned by the tool
 * @throws Exception Exception.
 */
public static int exec(Configuration conf, String... argv) throws Exception {
  try (AuditTool auditTool = new AuditTool()) {
    return ToolRunner.run(conf, auditTool, argv);
  }
}

/**
 * Main entry point. Runs the tool and terminates the JVM with
 * the resulting exit code.
 *
 * @param argv args list
 */
public static void main(String[] argv) {
  try {
    ExitUtil.terminate(exec(new Configuration(), argv));
  } catch (ExitUtil.ExitException e) {
    // Controlled exit raised by the tool: log it and exit with its status.
    LOG.error("Command failed: {}", e.toString());
    System.exit(e.status);
  } catch (Exception e) {
    // Unexpected failure: log with the stack trace and halt the JVM.
    LOG.error("Command failed: {}", e.toString(), e);
    ExitUtil.halt(-1, e);
  }
}
}
Original file line number Diff line number Diff line change
@@ -0,0 +1,48 @@
// Licensed to the Apache Software Foundation (ASF) under one
// or more contributor license agreements. See the NOTICE file
// distributed with this work for additional information
// regarding copyright ownership. The ASF licenses this file
// to you under the Apache License, Version 2.0 (the
// "License"); you may not use this file except in compliance
// with the License. You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.

{
"type" : "record", "name" : "AvroS3LogEntryRecord",
"namespace" : "org.apache.hadoop.fs.s3a.audit",
"fields" : [
{ "name" : "turnaroundtime" , "type" : ["long", "null"] },
{ "name" : "remoteip", "type" : "string" },
{ "name" : "auth", "type" : "string" },
{ "name" : "useragent", "type" : "string" },
{ "name" : "hostid", "type" : "string" },
{ "name" : "requesturi", "type" : "string" },
{ "name" : "endpoint", "type" : "string" },
{ "name" : "bytessent", "type" : ["long", "null"] },
{ "name" : "cypher", "type" : "string" },
{ "name" : "key", "type" : "string" },
{ "name" : "timestamp", "type" : "string" },
{ "name" : "awserrorcode", "type" : "string" },
{ "name" : "owner", "type" : "string" },
{ "name" : "requester", "type" : "string" },
{ "name" : "objectsize", "type" : ["long", "null"] },
{ "name" : "tail", "type" : "string" },
{ "name" : "verb", "type" : "string" },
{ "name" : "version", "type" : "string" },
{ "name" : "bucket", "type" : "string" },
{ "name" : "sigv", "type" : "string" },
{ "name" : "referrer", "type" : "string" },
{ "name" : "totaltime", "type" : ["long", "null"] },
{ "name" : "requestid", "type" : "string" },
{ "name" : "http", "type" : "string" },
{ "name" : "tls", "type" : "string" },
{ "name" : "referrerMap", "type" : {"type": "map", "values": "string"} }
]
}
Loading