package llf.com;
import java.io.IOException;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.hbase.HBaseConfiguration;
import org.apache.hadoop.hbase.KeyValue;
import org.apache.hadoop.hbase.client.Result;
import org.apache.hadoop.hbase.client.Scan;
import org.apache.hadoop.hbase.io.ImmutableBytesWritable;
import org.apache.hadoop.hbase.mapreduce.TableInputFormat;
import org.apache.hadoop.hbase.mapreduce.TableMapReduceUtil;
import org.apache.hadoop.hbase.mapreduce.TableMapper;
import org.apache.hadoop.hbase.util.Bytes;
import org.apache.hadoop.io.Text;
import org.apache.hadoop.mapreduce.Job;
import org.apache.hadoop.mapreduce.lib.output.FileOutputFormat;
import org.apache.hadoop.mapreduce.lib.output.SequenceFileOutputFormat;
import org.apache.hadoop.util.GenericOptionsParser;
public class TabelFinder {
final static String NAME = "find table";
static class Exporter extends TableMapper<Text, Text> {
@SuppressWarnings("deprecation")
public void map(ImmutableBytesWritable rowk, Result result, Context context)
throws IOException, InterruptedException {
if (rowk == null || result == null) return;
KeyValue[] kvs = result.rawk();
String KeyA = "";
String KeyB = "";
String KeyC = "";
String KeyD = "";
String KeyE = "";
String KeyF = "";
for (KeyValue kv : kvs) {
String qualifier = Bytes.toString(kv.getQualifier());
String value = Bytes.toString(kv.getValue());
if (qualifier.equals("KeyA")) {
KeyA = value;
} else if (qualifier.equals("KeyB")) {
KeyB = value;
} else if (qualifier.equals("KeyC")) {
KeyC = value;
} else if (qualifier.equals("KeyD")) {
KeyD = value;
} else if (qualifier.equals("KeyE")) {
KeyE = value;
} else if (qualifier.equals("KeyF")) {
KeyF = value;
}
}
StringBuffer sb = new StringBuffer();
sb.append(KeyA).append("\\\");
sb.append(KeyB).append("\\\");
sb.append(KeyC).append("\\\");
sb.append(KeyD).append("\\\");
sb.append(KeyE).append("\\\");
sb.append(KeyF).append("\\\");
context.write(new Text(KeyF), new Text(sb.toString()));
}
}
@SuppressWarnings("deprecation")
public static Job createSubmittableJob(Configuration conf, String[] args) throws IOException {
String tableName = "xx_test";
Path outputDir = new Path(args[0]);
Job job = new Job(conf, NAME + " " + tableName);
job.setJobName(NAME + "_" + tableName);
job.setJarByClass(EXXXX.class);
Scan s = getConfiguredScanForJob(conf, args);
TableMapReduceUtil.initTableMapperJob(tableName, s, Exporter.class, null, null, job);
job.setNumReduceTasks(0);
job.setOutputFormatClass(EYYYY.class);
job.setOutputKeyClass(Text.class);
job.setOutputValueClass(Text.class);
FileOutputFormat.setOutputPath(job, outputDir);
return job;
}
private static Scan getConddsss(Configuration conf, String[] args) throws IOException {
Scan s = new Scan();
s.setTimeRange(141999999999, 145999999999);
s.addColumn("folm".getBytes(), "KeyA".getBytes());
s.addColumn("folm".getBytes(), "KeyB".getBytes());
s.addColumn("folm".getBytes(), "KeyC".getBytes());
s.addColumn("folm".getBytes(), "KeyD".getBytes());
s.addColumn("folm".getBytes(), "KeyE".getBytes());
s.addColumn("folm".getBytes(), "KeyF".getBytes());
s.setCaching(100);
s.setCacheBlocks(false);
return s;
}
private static void usage(final String errorMsg) {
if (errorMsg != null && errorMsg.length() > 0) {
System.err.println("ERROR: " + errorMsg);
}
System.err.println("Usage: TabelScanner [-D <property=value>]* <outputdir> \n");
System.err.println(" Note: -D properties will be applied to the conf used. ");
System.err.println(" For example: ");
System.err.println(" -D mapred.output.compress=true");
System.err.println(" -D mapred.output.compression.codec=org.apache.hadoop.io.compress.GzipCodec");
System.err.println(" -D mapred.output.compression.type=BLOCK");
System.err.println(" eeeeeeeee, the foououoproperties can be specified");
System.err.println(" to control/limit what is exported..");
System.err.println(" -D " + ggeewywy + "=<familyName>");
System.err.println("For performance consider the following properties:\n"
+ " -Dhbase.client.scanner.caching=100\n"
+ " -Dmapred.map.tasks.speculative.execution=false\n"
+ " -Dmapred.reduce.tasks.speculative.execution=false");
}
public static void main(String[] args) throws Exception {
Configuration conf = HBaseConfiguration.create();
String[] otherArgs = new GenericOptionsParser(conf, args).getRemainingArgs();
if (otherArgs.length < 1) {
usage("Wrong number of arguments: " + otherArgs.length);
System.exit(-1);
}
Job job = createSubmittableJob(conf, otherArgs);
System.exit(job.waitForCompletion(true) ? 0 : 1);
}
}
// HBase example: basic methods -- part 60
// (Blog footer, originally published 2021-08-09 19:11:11; kept as a comment so the file compiles.)