Skip to content
New issue

Have a question about this project? Sign up for a free GitHub account to open an issue and contact its maintainers and the community.

By clicking “Sign up for GitHub”, you agree to our terms of service and privacy statement. We’ll occasionally send you account related emails.

Already on GitHub? Sign in to your account

Updating to hbase 1.0.0 and adding Import/Export map/reduce jobs... #116

Closed
wants to merge 9 commits into from
12 changes: 11 additions & 1 deletion bigtable-hbase/pom.xml
Original file line number Diff line number Diff line change
Expand Up @@ -186,7 +186,6 @@
<dependency>
<groupId>org.apache.hbase</groupId>
<artifactId>hbase-server</artifactId>
<scope>test</scope>
</dependency>
<dependency>
<groupId>org.apache.hbase</groupId>
Expand Down Expand Up @@ -280,6 +279,17 @@
<artifactId>maven-surefire-plugin</artifactId>
<version>2.17</version>
</plugin>
<plugin>
<groupId>org.apache.maven.plugins</groupId>
<artifactId>maven-jar-plugin</artifactId>
<configuration>
<archive>
<manifest>
<mainClass>com.google.cloud.bigtable.mapreduce.Driver</mainClass>
</manifest>
</archive>
</configuration>
</plugin>
</plugins>
</pluginManagement>
<plugins>
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -54,10 +54,12 @@
import org.apache.hadoop.hbase.HTableDescriptor;
import org.apache.hadoop.hbase.TableName;
import org.apache.hadoop.hbase.client.Append;
import org.apache.hadoop.hbase.client.BufferedMutator;
import org.apache.hadoop.hbase.client.Delete;
import org.apache.hadoop.hbase.client.Durability;
import org.apache.hadoop.hbase.client.Get;
import org.apache.hadoop.hbase.client.Increment;
import org.apache.hadoop.hbase.client.Mutation;
import org.apache.hadoop.hbase.client.Put;
import org.apache.hadoop.hbase.client.Result;
import org.apache.hadoop.hbase.client.ResultScanner;
Expand All @@ -78,7 +80,10 @@
import java.util.Map;
import java.util.concurrent.ExecutorService;

public class BigtableTable implements Table {
/*
* TODO(sduskis): Create an implementation of BufferedMutator that actually does buffered mutations.
*/
public class BigtableTable implements Table, BufferedMutator {
protected static final Logger LOG = new Logger(BigtableTable.class);

protected final TableName tableName;
Expand Down Expand Up @@ -162,10 +167,10 @@ public Configuration getConfiguration() {
return this.configuration;
}

public ExecutorService getPool(){
public ExecutorService getPool(){
return this.executorService;
}

@Override
public HTableDescriptor getTableDescriptor() throws IOException {
// TODO: Also include column family information
Expand Down Expand Up @@ -418,6 +423,24 @@ public void mutateRow(RowMutations rm) throws IOException {
}
}

/**
 * Applies a single {@link Mutation} as one atomic mutate-row RPC.
 *
 * <p>Adapts the HBase mutation into an Anviltop row mutation via
 * {@code mutationAdapter}, wraps it in a {@code MutateRowRequest}, and sends
 * it with {@code client.mutateAtomic}.
 *
 * @param m the mutation to apply
 * @throws IOException if the underlying RPC fails (the causing
 *     {@link ServiceException} is preserved as the cause)
 */
@Override
public void mutate(Mutation m) throws IOException {
AnviltopData.RowMutation.Builder mutation = mutationAdapter.adapt(m);
MutateRowRequest.Builder request = makeMutateRowRequest(mutation);

try {
client.mutateAtomic(request.build());
} catch (ServiceException e) {
// Surface RPC-layer failures as IOException, keeping the original
// exception as the cause for diagnosis.
LOG.error("Encountered ServiceException when executing mutate. Exception: %s", e);
throw new IOException("Failed to mutate.", e);
}
}

/**
 * Applies a list of mutations by delegating to the shared batch executor.
 *
 * <p>NOTE(review): this path bypasses the single-mutation atomic RPC used by
 * {@code mutate(Mutation)} — presumably intentional for batching throughput;
 * confirm that per-mutation atomicity is acceptable here.
 *
 * @param mutations the mutations to apply
 * @throws IOException if the batch execution fails
 */
@Override
public void mutate(List<? extends Mutation> mutations) throws IOException {
batchExecutor.batch(mutations);
}

@Override
public Result append(Append append) throws IOException {
LOG.trace("append(Append)");
Expand Down Expand Up @@ -495,19 +518,14 @@ public long incrementColumnValue(byte[] row, byte[] family, byte[] qualifier, lo
}

@Override
public boolean isAutoFlush() {
LOG.trace("isAutoFlush()");
return true;
}

@Override
public void flushCommits() throws IOException {
LOG.error("Unsupported flushCommits() called.");
public void flush() throws IOException {
LOG.error("Unsupported flush() called.");
throw new UnsupportedOperationException(); // TODO
}

/**
 * Closes this table. Currently a no-op: the executor service is NOT shut
 * down yet (see TODO), so callers should not assume resources are released.
 */
@Override
public void close() throws IOException {
// TODO: shutdown the executor.
}

@Override
Expand All @@ -532,11 +550,6 @@ public <T extends Service, R> void coprocessorService(Class<T> service, byte[] s
throw new UnsupportedOperationException(); // TODO
}

@Override
public void setAutoFlushTo(boolean autoFlush) {
LOG.warn("setAutoFlushTo(%s), but is currently a NOP", autoFlush);
}

@Override
public long getWriteBufferSize() {
LOG.error("Unsupported getWriteBufferSize() called");
Expand Down
Original file line number Diff line number Diff line change
@@ -0,0 +1,39 @@
/** Copyright (c) 2013 Google Inc.
*
* Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except
* in compliance with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software distributed under the License
* is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express
* or implied. See the License for the specific language governing permissions and limitations under
* the License.
*/
package com.google.cloud.bigtable.mapreduce;

import org.apache.hadoop.hbase.mapreduce.Export;
import org.apache.hadoop.util.ProgramDriver;

/**
* Driver for bigtable mapreduce jobs. Select which to run by passing
* name of job to this main.
*/
/**
 * Driver for bigtable map/reduce jobs. The first command-line argument selects
 * the job to run ("export-table" or "import-table"); remaining arguments are
 * forwarded to the selected job. Exits the JVM with the job's exit code, or
 * -1 on failure.
 */
public class Driver {

  public static void main(String[] args) {
    ProgramDriver programDriver = new ProgramDriver();
    int exitCode = -1;
    try {
      programDriver.addClass("export-table", Export.class,
          "A map/reduce program that exports a table to a file.");
      // NOTE(review): only Export is imported above; Import resolves only if it
      // lives in this package -- confirm whether
      // org.apache.hadoop.hbase.mapreduce.Import was intended.
      programDriver.addClass("import-table", Import.class,
          "A map/reduce program that imports a table from a file.");
      // Bug fix: the original called both programDriver.driver(args) AND
      // programDriver.run(args). ProgramDriver.driver() delegates to run(),
      // so the selected job was executed twice. Run it exactly once and
      // capture the exit code.
      exitCode = programDriver.run(args);
    } catch (Throwable e) {
      e.printStackTrace();
    }
    System.exit(exitCode);
  }
}
Loading