Java class org.apache.hadoop.hbase.mapreduce.Export — example source code

Project: cloud-bigtable-examples    File: WordCountDriver.java
/**
 * Entry point: registers the example MapReduce programs with a
 * {@link ProgramDriver} and dispatches to the one named by the first
 * command-line argument, exiting with that program's exit code.
 *
 * @param args first element selects the registered program; remaining
 *             elements are forwarded to it
 */
public static void main(String[] args) {
  ProgramDriver programDriver = new ProgramDriver();
  int exitCode = -1;
  try {
    programDriver.addClass("wordcount-hbase", WordCountHBase.class,
        "A map/reduce program that counts the words in the input files.");
    programDriver.addClass("export-table", Export.class,
        "A map/reduce program that exports a table to a file.");
    //programDriver.addClass("cellcounter", CellCounter.class, "Count them cells!");
    // Dispatch exactly once. The original code invoked both driver(args)
    // and run(args), which executed the selected program twice.
    exitCode = programDriver.run(args);
  } catch (Throwable e) {
    // Broad catch is intentional at the process boundary: any failure is
    // reported and mapped to a non-zero exit code.
    e.printStackTrace();
  }
  System.exit(exitCode);
}
Project: cloud-bigtable-client    File: Driver.java
/**
 * Entry point: registers the export/import MapReduce programs with a
 * {@link ProgramDriver} and dispatches to the one named by the first
 * command-line argument, exiting with that program's exit code.
 *
 * @param args first element selects the registered program; remaining
 *             elements are forwarded to it
 */
public static void main(String[] args) {
  ProgramDriver programDriver = new ProgramDriver();
  int exitCode = -1;
  try {
    programDriver.addClass("export-table", Export.class,
        "A map/reduce program that exports a table to a file.");
    programDriver.addClass("import-table", Import.class,
        "A map/reduce program that imports a table to a file.");
    // Dispatch exactly once. The original code invoked both driver(args)
    // and run(args), which executed the selected program twice.
    exitCode = programDriver.run(args);
  } catch (Throwable e) {
    // Broad catch is intentional at the process boundary: any failure is
    // reported and mapped to a non-zero exit code.
    e.printStackTrace();
  }
  System.exit(exitCode);
}
Project: cloud-bigtable-client    File: TestImport.java
@Test
@Category(KnownGap.class)
/**
 * End-to-end export/import round trip: writes one cell to a source table,
 * runs the Export MapReduce job to a temp HDFS directory, imports the
 * output into a freshly created table, and verifies the cell survived.
 *
 * @throws IOException            on HBase/filesystem failures
 * @throws ClassNotFoundException if the MapReduce job classes cannot be loaded
 * @throws InterruptedException   if waiting for job completion is interrupted
 */
public void testMapReduce() throws IOException, ClassNotFoundException, InterruptedException {
  // Close every Table/Admin we open; the originals were leaked.
  try (Table oldTable = getConnection().getTable(TABLE_NAME)) {
    // Put a value.
    byte[] rowKey = dataHelper.randomData("testrow-");
    byte[] qual = dataHelper.randomData("testQualifier-");
    byte[] value = dataHelper.randomData("testValue-");
    Put put = new Put(rowKey);
    put.addColumn(COLUMN_FAMILY, qual, value);
    oldTable.put(put);

    // Assert the value is there.
    Get get = new Get(rowKey);
    Result result = oldTable.get(get);
    List<Cell> cells = result.listCells();
    Assert.assertEquals(1, cells.size());
    // JUnit convention: expected value first, actual second.
    Assert.assertArrayEquals(value, CellUtil.cloneValue(cells.get(0)));

    // Run the export.
    Configuration conf = getConnection().getConfiguration();

    //conf.set("fs.defaultFS", "file:///");
    FileSystem dfs = IntegrationTests.getMiniCluster().getFileSystem();
    String tempDir = "hdfs://" + dfs.getCanonicalServiceName() + "/tmp/backup";

    String[] args = new String[]{
        TABLE_NAME.getNameAsString(),
        tempDir
    };
    Job job = Export.createSubmittableJob(conf, args);
    // So it looks for jars in the local FS, not HDFS.
    job.getConfiguration().set("fs.defaultFS", "file:///");
    Assert.assertTrue(job.waitForCompletion(true));

    // Create new table.
    TableName newTableName = IntegrationTests.newTestTableName();

    // Change for method in IntegrationTests
    try (Admin admin = getConnection().getAdmin()) {
      HColumnDescriptor hcd = new HColumnDescriptor(IntegrationTests.COLUMN_FAMILY);
      HTableDescriptor htd = new HTableDescriptor(newTableName);
      htd.addFamily(hcd);
      admin.createTable(htd);
    }

    try (Table newTable = getConnection().getTable(newTableName)) {
      // Run the import.
      args = new String[]{
          newTableName.getNameAsString(),
          tempDir
      };
      job = Import.createSubmittableJob(conf, args);
      job.getConfiguration().set("fs.defaultFS", "file:///");
      Assert.assertTrue(job.waitForCompletion(true));

      // Assert the value round-tripped into the new table.
      get = new Get(rowKey);
      result = newTable.get(get);
      cells = result.listCells();
      Assert.assertEquals(1, cells.size());
      Assert.assertArrayEquals(value, CellUtil.cloneValue(cells.get(0)));
    }
  }
}