Add travis build step and fix javadoc formatting #80

Merged · 3 commits · Feb 27, 2019
Changes from all commits
24 changes: 18 additions & 6 deletions .travis.yml
@@ -1,9 +1,21 @@
language: java
-git:
-  depth: 3
jdk:
  - oraclejdk8
-env:
-  - HADOOP_VERSION=2.7.6
-  - HADOOP_VERSION=2.8.4
-script: "./gradlew test --stacktrace --info -Ddyno.hadoop.bin.version=${HADOOP_VERSION}"
+git:
+  depth: 3
+install: true
+
+jobs:
+  include:
+    - stage: test
+      name: "Build"
+      script: "./gradlew build -x test"
+    - stage: test
+      script: "./gradlew test --stacktrace --info -Ddyno.hadoop.bin.version=${HADOOP_VERSION}"
+      env: HADOOP_VERSION=2.7.6
+    - stage: test
+      script: "./gradlew test --stacktrace --info -Ddyno.hadoop.bin.version=${HADOOP_VERSION}"
+      env: HADOOP_VERSION=2.8.4
+
+stages:
+  - test
@@ -20,11 +20,11 @@

/**
* An {@link InputFormat} which is time-based. Starts at some timestamp (specified by the
- * {@value WorkloadDriver#START_TIMESTAMP_MS configuration) and runs for a time specified by the
+ * {@value WorkloadDriver#START_TIMESTAMP_MS} configuration) and runs for a time specified by the
* {@value DURATION_KEY} configuration. Spawns {@value NUM_MAPPERS_KEY} mappers. Both {@value DURATION_KEY}
* and {@value NUM_MAPPERS_KEY} are required.
*
- * <p/>The values returned as the key by this InputFormat are just a sequential counter.
+ * <p>The values returned as the key by this InputFormat are just a sequential counter.
*/
public class TimedInputFormat extends InputFormat<LongWritable, NullWritable> {

@@ -48,14 +48,14 @@ public List<InputSplit> getSplits(JobContext job) {
public RecordReader<LongWritable, NullWritable> createRecordReader(InputSplit split, TaskAttemptContext context) {
  return new TimedRecordReader();
}

public static List<String> getConfigDescriptions() {
  return Lists.newArrayList(
      NUM_MAPPERS_KEY + " (required): Number of mappers to launch.",
      DURATION_KEY + " (required): Number of minutes to induce workload for."
  );
}

public static boolean verifyConfigurations(Configuration conf) {
  return conf.getInt(NUM_MAPPERS_KEY, -1) != -1
      && conf.getTimeDuration(DURATION_KEY, -1, TimeUnit.MILLISECONDS) != -1;
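For reviewers less familiar with the workload generator, here is a minimal, illustrative sketch of how the two required keys checked by verifyConfigurations() above might be wired into a MapReduce job. It assumes NUM_MAPPERS_KEY and DURATION_KEY are publicly accessible String constants on TimedInputFormat (and that TimedInputFormat is imported from its package); it is not code from this PR.

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.mapreduce.Job;

public class TimedWorkloadExample {
  public static void main(String[] args) throws Exception {
    Configuration conf = new Configuration();
    // Both keys are required; verifyConfigurations() above returns false if either is missing.
    conf.setInt(TimedInputFormat.NUM_MAPPERS_KEY, 10);  // number of mappers to launch
    conf.set(TimedInputFormat.DURATION_KEY, "5m");      // read via getTimeDuration, so unit suffixes work
    Job job = Job.getInstance(conf, "timed-workload-example");
    job.setInputFormatClass(TimedInputFormat.class);
    // Mapper, output format, etc. would be configured here before submitting the job.
  }
}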
@@ -21,14 +21,14 @@
* Where relativeTimestampMs represents the time elapsed between the start of
* the audit log and the occurrence of the audit event. Assuming your audit
* logs are available in Hive, this can be generated with a query looking like:
- * <pre>
+ * <pre>{@code
* INSERT OVERWRITE DIRECTORY '${outputPath}'
* SELECT (timestamp - ${startTimestamp} AS relativeTimestamp, ugi, cmd, src, dst, ip
* FROM '${auditLogTableLocation}'
* WHERE timestamp >= ${startTimestamp} AND timestamp < ${endTimestamp}
* DISTRIBUTE BY src
* SORT BY relativeTimestamp ASC;
- * </pre>
+ * }</pre>
* Note that the sorting step is important; events in each distinct file must be in
* time-ascending order.
*/
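As a rough illustration of the record shape that query produces, the sketch below splits one generated line into the six selected fields. The \u0001 field delimiter is an assumption (Hive's default for INSERT OVERWRITE DIRECTORY when no ROW FORMAT is given), and the values are made up; Dynamometer's actual audit log parsers may expect a different layout.

public class AuditLineExample {
  public static void main(String[] args) {
    // A made-up line in the shape selected by the query above, using the assumed \u0001 delimiter.
    String line = "12345\u0001hdfs\u0001open\u0001/some/path\u0001null\u00011.2.3.4";
    String[] fields = line.split("\u0001");
    long relativeTimestampMs = Long.parseLong(fields[0]); // ms elapsed since the start of the audit log
    String ugi = fields[1];  // user/group information of the caller
    String cmd = fields[2];  // HDFS operation, e.g. open or create
    String src = fields[3];  // source path
    String dst = fields[4];  // destination path ("null" when the operation has none)
    String ip = fields[5];   // client IP address
    System.out.println(relativeTimestampMs + " " + ugi + " " + cmd + " " + src + " " + dst + " " + ip);
  }
}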
@@ -13,7 +13,7 @@
import org.apache.hadoop.io.WritableComparable;

/**
- * UserCommandKey is a {@link Writable} used as a composite key combining the user id and
+ * UserCommandKey is a {@link WritableComparable} used as a composite key combining the user id and
* type of a replayed command. It is used as the output key for AuditReplayMapper and the
* keys for AuditReplayReducer.
*/
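For readers who have not implemented a composite Hadoop key before, here is a generic, illustrative sketch of the pattern the javadoc describes: a WritableComparable that serializes a user id and a command type and compares on both fields. It is not the actual UserCommandKey implementation from this repository.

import java.io.DataInput;
import java.io.DataOutput;
import java.io.IOException;

import org.apache.hadoop.io.Text;
import org.apache.hadoop.io.WritableComparable;

// Illustrative composite key: a user id plus a command type, comparable on both fields.
public class UserCommandKeyExample implements WritableComparable<UserCommandKeyExample> {
  private final Text user = new Text();
  private final Text command = new Text();

  public void set(String userId, String commandType) {
    user.set(userId);
    command.set(commandType);
  }

  @Override
  public void write(DataOutput out) throws IOException {
    user.write(out);
    command.write(out);
  }

  @Override
  public void readFields(DataInput in) throws IOException {
    user.readFields(in);
    command.readFields(in);
  }

  @Override
  public int compareTo(UserCommandKeyExample other) {
    int cmp = user.compareTo(other.user);
    return cmp != 0 ? cmp : command.compareTo(other.command);
  }

  @Override
  public boolean equals(Object o) {
    return o instanceof UserCommandKeyExample && compareTo((UserCommandKeyExample) o) == 0;
  }

  @Override
  public int hashCode() {
    return 31 * user.hashCode() + command.hashCode();
  }
}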