Skip to content

Commit

Permalink
#70 add logging to investigate the possible cause of the last run's failure
Browse files Browse the repository at this point in the history
  • Loading branch information
paulhoule committed Nov 22, 2013
1 parent 6c317c5 commit 1e3fdf2
Show file tree
Hide file tree
Showing 2 changed files with 26 additions and 16 deletions.
33 changes: 18 additions & 15 deletions .idea/workspace.xml

Some generated files are not rendered by default. Learn more about how customized files appear on GitHub.

Original file line number Diff line number Diff line change
Expand Up @@ -5,6 +5,8 @@
import com.google.common.collect.Lists;
import com.google.common.collect.PeekingIterator;
import com.ontology2.bakemono.Main;
import org.apache.commons.logging.Log;
import org.apache.commons.logging.LogFactory;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.io.NullWritable;
Expand All @@ -20,6 +22,7 @@
import java.util.List;

public class FetchTriplesWithMatchingObjectsTool implements Tool {
private static Log logger= LogFactory.getLog(FetchTriplesWithMatchingObjectsTool.class);
private Configuration conf;

@Override
Expand All @@ -44,13 +47,17 @@ public int run(String[] arg0) throws Exception {
// The first argument is the list of objects
String inputA=a.next();

if (!a.hasNext())
usage();

// Middle positional parameters are sources of triples
List<String> paths= Lists.newArrayList(a);

// The last positional parameter is the output path
String output=paths.get(paths.size()-1);
String output=paths.get(paths.size() - 1);
paths.remove(paths.size()-1);

logger.info("Writing to output path "+output);
conf.set("mapred.compress.map.output", "true");
conf.set("mapred.output.compression.type", "BLOCK");
conf.set("mapred.map.output.compression.codec", "org.apache.hadoop.io.compress.GzipCodec");
Expand Down

0 comments on commit 1e3fdf2

Please sign in to comment.