Commit

Support import from HBase snapshot
New files for configuring HBaseSnapshotInputFormat

resolve version conflict and upgrade Beam version to 2.24.0

revert disk option change, not enough quota

Code reorg

code reduction

Refactor naming
Add integration config

Add unit test for HBaseSnapshotInputConfiguration

Set up skeleton for integration testing

Ship test data with code, integration tests pass

Clean up code for PR

Add HBase commands that generate our test snapshot
lichng committed Dec 11, 2020
1 parent b6bf3c7 commit b99c98a
Showing 34 changed files with 690 additions and 14 deletions.
75 changes: 70 additions & 5 deletions bigtable-dataflow-parent/bigtable-beam-import/pom.xml
@@ -25,7 +25,8 @@ limitations under the License.
<artifactId>bigtable-beam-import</artifactId>

<properties>
<mainClass>com.google.cloud.bigtable.beam.sequencefiles.Main</mainClass>
<mainClass>com.google.cloud.bigtable.beam.Main</mainClass>
<skipITs>false</skipITs>
</properties>

<dependencies>
@@ -61,14 +62,12 @@ limitations under the License.
</dependency>
<dependency>
<groupId>org.apache.beam</groupId>
<artifactId>
beam-sdks-java-extensions-google-cloud-platform-core
</artifactId>
<artifactId>beam-sdks-java-io-hadoop-common</artifactId>
<version>${beam.version}</version>
</dependency>
<dependency>
<groupId>org.apache.beam</groupId>
<artifactId>beam-sdks-java-io-hadoop-common</artifactId>
<artifactId>beam-sdks-java-io-hadoop-format</artifactId>
<version>${beam.version}</version>
</dependency>

@@ -78,6 +77,21 @@ limitations under the License.
<version>${hbase.version}</version>
</dependency>

<!-- For HBase 2.x, this should be hbase-mapreduce
https://hbase.apache.org/2.1/book.html#export
-->
<dependency>
<groupId>org.apache.hbase</groupId>
<artifactId>hbase-shaded-server</artifactId>
<version>${hbase.version}</version>
</dependency>

<!-- https://mvnrepository.com/artifact/org.apache.hbase/hbase-common -->
<dependency>
<groupId>org.apache.hbase</groupId>
<artifactId>hbase-common</artifactId>
<version>${hbase.version}</version>
</dependency>
<dependency>
<groupId>com.google.auto.value</groupId>
<artifactId>auto-value</artifactId>
@@ -133,6 +147,13 @@ limitations under the License.
<artifactId>slf4j-api</artifactId>
<version>${slf4j.version}</version>
</dependency>
<!-- https://mvnrepository.com/artifact/com.google.cloud.bigdataoss/gcs-connector -->
<dependency>
<groupId>com.google.cloud.bigdataoss</groupId>
<artifactId>gcs-connector</artifactId>
<version>hadoop2-2.1.4</version>
<classifier>shaded</classifier>
</dependency>

<!-- Test -->
<dependency>
@@ -165,6 +186,12 @@ limitations under the License.
<version>${junit.version}</version>
<scope>test</scope>
</dependency>
<dependency>
<groupId>org.apache.hbase</groupId>
<artifactId>hbase-shaded-testing-util</artifactId>
<version>${hbase.version}</version>
<scope>test</scope>
</dependency>
</dependencies>

<build>
@@ -249,6 +276,16 @@ limitations under the License.
<transformers>
<transformer implementation="org.apache.maven.plugins.shade.resource.ServicesResourceTransformer" />
</transformers>
<filters>
<filter>
<artifact>*:*</artifact>
<excludes>
<exclude>META-INF/*.SF</exclude>
<exclude>META-INF/*.DSA</exclude>
<exclude>META-INF/*.RSA</exclude>
</excludes>
</filter>
</filters>
</configuration>
</plugin>

@@ -360,5 +397,33 @@ limitations under the License.
</plugins>
</build>
</profile>

<profile>
<id>hbasesnapshotsIntegrationTest</id>
<build>
<plugins>
<plugin>
<groupId>org.apache.maven.plugins</groupId>
<artifactId>maven-failsafe-plugin</artifactId>
<executions>
<execution>
<id>hbasesnapshots-integration-test</id>
<goals>
<goal>integration-test</goal>
</goals>
<phase>integration-test</phase>
<configuration>
<forkCount>1</forkCount>
<includes>
<include>**/hbasesnapshots/*IT.java</include>
</includes>
<useSystemClassLoader>false</useSystemClassLoader>
</configuration>
</execution>
</executions>
</plugin>
</plugins>
</build>
</profile>
</profiles>
</project>
bigtable-dataflow-parent/bigtable-beam-import/src/main/java/com/google/cloud/bigtable/beam/Main.java
@@ -13,10 +13,14 @@
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.google.cloud.bigtable.beam.sequencefiles;
package com.google.cloud.bigtable.beam;

import com.google.bigtable.repackaged.com.google.api.core.InternalApi;
import com.google.bigtable.repackaged.com.google.api.core.InternalExtensionOnly;
import com.google.cloud.bigtable.beam.hbasesnapshots.ImportJobFromHbaseSnapshot;
import com.google.cloud.bigtable.beam.sequencefiles.CreateTableHelper;
import com.google.cloud.bigtable.beam.sequencefiles.ExportJob;
import com.google.cloud.bigtable.beam.sequencefiles.ImportJob;
import java.io.File;
import java.net.URISyntaxException;
import java.util.Arrays;
@@ -43,6 +47,9 @@ public static void main(String[] args) throws Exception {
case "import":
ImportJob.main(subArgs);
break;
case "importsnapshot":
ImportJobFromHbaseSnapshot.main(subArgs);
break;
case "create-table":
CreateTableHelper.main(subArgs);
break;
@@ -65,7 +72,7 @@ private static void usage() {

System.out.printf(
"java -jar %s <action> <action_params>\n"
+ "Where <action> can be 'export', 'import' or 'create-table'. To get further help, run: \n"
+ "Where <action> can be 'export', 'import' , 'importsnapshot' or 'create-table'. To get further help, run: \n"
+ "java -jar %s <action> --help\n",
jarName, jarName);
}
bigtable-dataflow-parent/bigtable-beam-import/src/main/java/com/google/cloud/bigtable/beam/hbasesnapshots/HBaseSnapshotInputConfiguration.java
@@ -0,0 +1,95 @@
/*
* Copyright 2017 Google Inc. All Rights Reserved.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.google.cloud.bigtable.beam.hbasesnapshots;

import static java.lang.System.*;

import com.google.common.base.Preconditions;
import org.apache.beam.sdk.io.hadoop.SerializableConfiguration;
import org.apache.beam.sdk.options.ValueProvider;
import org.apache.commons.logging.Log;
import org.apache.commons.logging.LogFactory;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.hbase.HBaseConfiguration;
import org.apache.hadoop.hbase.client.Result;
import org.apache.hadoop.hbase.client.Scan;
import org.apache.hadoop.hbase.io.ImmutableBytesWritable;
import org.apache.hadoop.hbase.mapreduce.TableInputFormat;
import org.apache.hadoop.hbase.mapreduce.TableSnapshotInputFormat;
import org.apache.hadoop.hbase.protobuf.ProtobufUtil;
import org.apache.hadoop.hbase.protobuf.generated.ClientProtos;
import org.apache.hadoop.hbase.util.Base64;
import org.apache.hadoop.io.Writable;
import org.apache.hadoop.mapreduce.InputFormat;
import org.apache.hadoop.mapreduce.Job;

/**
* A {@link Configuration} that can be used with {@link HadoopFormatIO} to read an HBase snapshot
* hosted in a Google Cloud Storage (GCS) bucket via the GCS connector. It uses {@link
* TableSnapshotInputFormat} to read the snapshot.
*/
class HBaseSnapshotInputConfiguration {

private static final Log LOG = LogFactory.getLog(HBaseSnapshotInputConfiguration.class);
private static final int BATCH_SIZE = 1000;

private final Configuration hbaseConf;

/**
* Constructs a configuration for reading an HBase snapshot that was exported to a GCS bucket.
*
* @param gcsProjectId The GCP project that owns the GCS bucket holding the snapshot.
* @param snapshotDir The GCS path of the exported HBase root directory containing the snapshot.
* @param snapshotName The name of the HBase snapshot to read.
* @param restoreDir A temporary directory into which the snapshot is restored for reading.
*/
HBaseSnapshotInputConfiguration(
ValueProvider<String> gcsProjectId,
ValueProvider<String> snapshotDir,
ValueProvider<String> snapshotName,
ValueProvider<String> restoreDir) {

Preconditions.checkArgument(
snapshotDir.toString().startsWith("gs://"),
"snapshot folder must be hosted in a GCS bucket ");

Configuration conf = HBaseConfiguration.create();
try {
conf.set("hbase.rootdir", snapshotDir.toString());
conf.set("fs.AbstractFileSystem.gs.impl", "com.google.cloud.hadoop.fs.gcs.GoogleHadoopFS");
conf.set("fs.gs.project.id", gcsProjectId.toString());
conf.set("fs.defaultFS", snapshotDir.toString());
conf.set("google.cloud.auth.service.account.enable", "true");
conf.setClass(
"mapreduce.job.inputformat.class", TableSnapshotInputFormat.class, InputFormat.class);
conf.setClass("key.class", ImmutableBytesWritable.class, Writable.class);
conf.setClass("value.class", Result.class, Object.class);
ClientProtos.Scan proto = ProtobufUtil.toScan(new Scan().setBatch(BATCH_SIZE));
conf.set(TableInputFormat.SCAN, Base64.encodeBytes(proto.toByteArray()));

LOG.debug(conf);
Job job = Job.getInstance(conf); // creates internal clone of hbaseConf
TableSnapshotInputFormat.setInput(
job, snapshotName.toString(), new Path(restoreDir.toString()));
conf = job.getConfiguration(); // extract the modified clone
} catch (Exception e) {
LOG.fatal(e);
}
this.hbaseConf = new SerializableConfiguration(conf).get();
}

public Configuration getHbaseConf() {
return hbaseConf;
}
}
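
For context, a minimal, hypothetical sketch (not part of this commit) of how a Beam pipeline such as the new ImportJobFromHbaseSnapshot job might consume this configuration through HadoopFormatIO, the reader provided by the beam-sdks-java-io-hadoop-format dependency added to the pom above. The pipeline, projectId, snapshotDir, snapshotName, and restoreDir variables are placeholders; the actual job wiring is not shown in this diff.

// Hypothetical usage sketch: `pipeline` is an org.apache.beam.sdk.Pipeline and the
// ValueProvider arguments are placeholders that would come from the job's pipeline options.
Configuration hbaseConf =
    new HBaseSnapshotInputConfiguration(projectId, snapshotDir, snapshotName, restoreDir)
        .getHbaseConf();

// The key.class/value.class entries set in the configuration match the generic types here.
PCollection<KV<ImmutableBytesWritable, Result>> snapshotRows =
    pipeline.apply(
        "ReadHBaseSnapshot",
        HadoopFormatIO.<ImmutableBytesWritable, Result>read().withConfiguration(hbaseConf));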
