Example source code for the Java class org.apache.hadoop.mapred.lib.db.DBInputFormat.NullDBWritable

Project: hadoop    File: TestDBInputFormat.java
/**
 * Test the DBInputFormat class. The class should split the result into chunks.
 * @throws Exception
 */
@Test(timeout = 10000)
public void testDBInputFormat() throws Exception {
  JobConf configuration = new JobConf();
  setupDriver(configuration);

  DBInputFormat<NullDBWritable> format = new DBInputFormat<NullDBWritable>();
  format.setConf(configuration);
  DBInputFormat.DBInputSplit splitter = new DBInputFormat.DBInputSplit(1, 10);
  Reporter reporter = mock(Reporter.class);
  RecordReader<LongWritable, NullDBWritable> reader = format.getRecordReader(
      splitter, configuration, reporter);

  configuration.setInt(MRJobConfig.NUM_MAPS, 3);
  InputSplit[] lSplits = format.getSplits(configuration, 3);
  assertEquals(5, lSplits[0].getLength());
  assertEquals(3, lSplits.length);

  // Test the reader: a few simple sanity checks.
  assertEquals(LongWritable.class, reader.createKey().getClass());
  assertEquals(0, reader.getPos());
  assertEquals(0, reader.getProgress(), 0.001);
  reader.close();
}
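The test above relies on a setupDriver helper that this page does not reproduce. A minimal sketch of what such a helper plausibly looks like, assuming the fake JDBC driver DriverForTest referenced later on this page (the property names come from DBConfiguration; the method body is an approximation, not the project's verbatim source):

// Hypothetical sketch: register a fake JDBC driver and point DBConfiguration
// at it, so DBInputFormat can obtain a connection without a real database.
// Assumes java.sql.DriverManager and org.apache.hadoop.mapred.lib.db.DBConfiguration
// are imported, alongside the DriverForTest test helper.
private void setupDriver(JobConf configuration) throws Exception {
  configuration.set(DBConfiguration.URL_PROPERTY, "testUrl");
  DriverManager.registerDriver(new DriverForTest());
  configuration.set(DBConfiguration.DRIVER_CLASS_PROPERTY,
      DriverForTest.class.getCanonicalName());
}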
Project: hadoop    File: TestDBInputFormat.java
/**
 * Test DBRecordReader. The reader should create keys and values and track its position.
 */
@SuppressWarnings("unchecked")
@Test (timeout = 5000)
public void testDBRecordReader() throws Exception {
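  // Note: DBRecordReader is a non-static inner class of DBInputFormat, which is
  // why it is instantiated below with the outer.new Inner(...) syntax.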

  JobConf job = mock(JobConf.class);
  DBConfiguration dbConfig = mock(DBConfiguration.class);
  String[] fields = { "field1", "field2" };

  @SuppressWarnings("rawtypes")
  DBRecordReader reader = new DBInputFormat<NullDBWritable>().new DBRecordReader(
      new DBInputSplit(),  NullDBWritable.class, job,
      DriverForTest.getConnection(), dbConfig, "condition", fields, "table");
  LongWritable key = reader.createKey();
  assertEquals(0, key.get());
  DBWritable value = reader.createValue();
  assertEquals(
      "org.apache.hadoop.mapred.lib.db.DBInputFormat$NullDBWritable", value
          .getClass().getName());
  assertEquals(0, reader.getPos());
  assertFalse(reader.next(key, value));

}
Project: aliyun-oss-hadoop-fs    File: TestDBInputFormat.java
/**
 * Test the DBInputFormat class. The class should split the result into chunks.
 * @throws Exception
 */
@Test(timeout = 10000)
public void testDBInputFormat() throws Exception {
  JobConf configuration = new JobConf();
  setupDriver(configuration);

  DBInputFormat<NullDBWritable> format = new DBInputFormat<NullDBWritable>();
  format.setConf(configuration);
  DBInputFormat.DBInputSplit splitter = new DBInputFormat.DBInputSplit(1, 10);
  Reporter reporter = mock(Reporter.class);
  RecordReader<LongWritable, NullDBWritable> reader = format.getRecordReader(
      splitter, configuration, reporter);

  configuration.setInt(MRJobConfig.NUM_MAPS, 3);
  InputSplit[] lSplits = format.getSplits(configuration, 3);
  assertEquals(5, lSplits[0].getLength());
  assertEquals(3, lSplits.length);

  // Test the reader: a few simple sanity checks.
  assertEquals(LongWritable.class, reader.createKey().getClass());
  assertEquals(0, reader.getPos());
  assertEquals(0, reader.getProgress(), 0.001);
  reader.close();
}
Project: aliyun-oss-hadoop-fs    File: TestDBInputFormat.java
/**
 * Test DBRecordReader. The reader should create keys and values and track its position.
 */
@SuppressWarnings("unchecked")
@Test (timeout = 5000)
public void testDBRecordReader() throws Exception {

  JobConf job = mock(JobConf.class);
  DBConfiguration dbConfig = mock(DBConfiguration.class);
  String[] fields = { "field1", "field2" };

  @SuppressWarnings("rawtypes")
  DBRecordReader reader = new DBInputFormat<NullDBWritable>().new DBRecordReader(
      new DBInputSplit(),  NullDBWritable.class, job,
      DriverForTest.getConnection(), dbConfig, "condition", fields, "table");
  LongWritable key = reader.createKey();
  assertEquals(0, key.get());
  DBWritable value = reader.createValue();
  assertEquals(
      "org.apache.hadoop.mapred.lib.db.DBInputFormat$NullDBWritable", value
          .getClass().getName());
  assertEquals(0, reader.getPos());
  assertFalse(reader.next(key, value));

}
Project: big-c    File: TestDBInputFormat.java
/**
 * Test the DBInputFormat class. The class should split the result into chunks.
 * @throws Exception
 */
@Test(timeout = 10000)
public void testDBInputFormat() throws Exception {
  JobConf configuration = new JobConf();
  setupDriver(configuration);

  DBInputFormat<NullDBWritable> format = new DBInputFormat<NullDBWritable>();
  format.setConf(configuration);
  DBInputFormat.DBInputSplit splitter = new DBInputFormat.DBInputSplit(1, 10);
  Reporter reporter = mock(Reporter.class);
  RecordReader<LongWritable, NullDBWritable> reader = format.getRecordReader(
      splitter, configuration, reporter);

  configuration.setInt(MRJobConfig.NUM_MAPS, 3);
  InputSplit[] lSplits = format.getSplits(configuration, 3);
  assertEquals(5, lSplits[0].getLength());
  assertEquals(3, lSplits.length);

  // Test the reader: a few simple sanity checks.
  assertEquals(LongWritable.class, reader.createKey().getClass());
  assertEquals(0, reader.getPos());
  assertEquals(0, reader.getProgress(), 0.001);
  reader.close();
}
Project: big-c    File: TestDBInputFormat.java
/**
 * Test DBRecordReader. The reader should create keys and values and track its position.
 */
@SuppressWarnings("unchecked")
@Test (timeout = 5000)
public void testDBRecordReader() throws Exception {

  JobConf job = mock(JobConf.class);
  DBConfiguration dbConfig = mock(DBConfiguration.class);
  String[] fields = { "field1", "field2" };

  @SuppressWarnings("rawtypes")
  DBRecordReader reader = new DBInputFormat<NullDBWritable>().new DBRecordReader(
      new DBInputSplit(),  NullDBWritable.class, job,
      DriverForTest.getConnection(), dbConfig, "condition", fields, "table");
  LongWritable key = reader.createKey();
  assertEquals(0, key.get());
  DBWritable value = reader.createValue();
  assertEquals(
      "org.apache.hadoop.mapred.lib.db.DBInputFormat$NullDBWritable", value
          .getClass().getName());
  assertEquals(0, reader.getPos());
  assertFalse(reader.next(key, value));

}
Project: hadoop-2.6.0-cdh5.4.3    File: TestDBInputFormat.java
/**
 * Test the DBInputFormat class. The class should split the result into chunks.
 * @throws Exception
 */
@Test(timeout = 10000)
public void testDBInputFormat() throws Exception {
  JobConf configuration = new JobConf();
  setupDriver(configuration);

  DBInputFormat<NullDBWritable> format = new DBInputFormat<NullDBWritable>();
  format.setConf(configuration);
  DBInputFormat.DBInputSplit splitter = new DBInputFormat.DBInputSplit(1, 10);
  Reporter reporter = mock(Reporter.class);
  RecordReader<LongWritable, NullDBWritable> reader = format.getRecordReader(
      splitter, configuration, reporter);

  configuration.setInt(MRJobConfig.NUM_MAPS, 3);
  InputSplit[] lSplits = format.getSplits(configuration, 3);
  assertEquals(5, lSplits[0].getLength());
  assertEquals(3, lSplits.length);

  // Test the reader: a few simple sanity checks.
  assertEquals(LongWritable.class, reader.createKey().getClass());
  assertEquals(0, reader.getPos());
  assertEquals(0, reader.getProgress(), 0.001);
  reader.close();
}
Project: hadoop-2.6.0-cdh5.4.3    File: TestDBInputFormat.java
/**
 * Test DBRecordReader. The reader should create keys and values and track its position.
 */
@SuppressWarnings("unchecked")
@Test (timeout = 5000)
public void testDBRecordReader() throws Exception {

  JobConf job = mock(JobConf.class);
  DBConfiguration dbConfig = mock(DBConfiguration.class);
  String[] fields = { "field1", "field2" };

  @SuppressWarnings("rawtypes")
  DBRecordReader reader = new DBInputFormat<NullDBWritable>().new DBRecordReader(
      new DBInputSplit(),  NullDBWritable.class, job,
      DriverForTest.getConnection(), dbConfig, "condition", fields, "table");
  LongWritable key = reader.createKey();
  assertEquals(0, key.get());
  DBWritable value = reader.createValue();
  assertEquals(
      "org.apache.hadoop.mapred.lib.db.DBInputFormat$NullDBWritable", value
          .getClass().getName());
  assertEquals(0, reader.getPos());
  assertFalse(reader.next(key, value));

}
Project: FlexMap    File: TestDBInputFormat.java
/**
 * Test the DBInputFormat class. The class should split the result into chunks.
 * @throws Exception
 */
@Test(timeout = 10000)
public void testDBInputFormat() throws Exception {
  JobConf configuration = new JobConf();
  setupDriver(configuration);

  DBInputFormat<NullDBWritable> format = new DBInputFormat<NullDBWritable>();
  format.setConf(configuration);
  DBInputFormat.DBInputSplit splitter = new DBInputFormat.DBInputSplit(1, 10);
  Reporter reporter = mock(Reporter.class);
  RecordReader<LongWritable, NullDBWritable> reader = format.getRecordReader(
      splitter, configuration, reporter);

  configuration.setInt(MRJobConfig.NUM_MAPS, 3);
  InputSplit[] lSplits = format.getSplits(configuration, 3);
  assertEquals(5, lSplits[0].getLength());
  assertEquals(3, lSplits.length);

  // Test the reader: a few simple sanity checks.
  assertEquals(LongWritable.class, reader.createKey().getClass());
  assertEquals(0, reader.getPos());
  assertEquals(0, reader.getProgress(), 0.001);
  reader.close();
}
Project: FlexMap    File: TestDBInputFormat.java
/**
 * Test DBRecordReader. The reader should create keys and values and track its position.
 */
@SuppressWarnings("unchecked")
@Test (timeout = 5000)
public void testDBRecordReader() throws Exception {

  JobConf job = mock(JobConf.class);
  DBConfiguration dbConfig = mock(DBConfiguration.class);
  String[] fields = { "field1", "field2" };

  @SuppressWarnings("rawtypes")
  DBRecordReader reader = new DBInputFormat<NullDBWritable>().new DBRecordReader(
      new DBInputSplit(),  NullDBWritable.class, job,
      DriverForTest.getConnection(), dbConfig, "condition", fields, "table");
  LongWritable key = reader.createKey();
  assertEquals(0, key.get());
  DBWritable value = reader.createValue();
  assertEquals(
      "org.apache.hadoop.mapred.lib.db.DBInputFormat$NullDBWritable", value
          .getClass().getName());
  assertEquals(0, reader.getPos());
  assertFalse(reader.next(key, value));

}
Project: hops    File: TestDBInputFormat.java
/**
 * Test the DBInputFormat class. The class should split the result into chunks.
 * @throws Exception
 */
@Test(timeout = 10000)
public void testDBInputFormat() throws Exception {
  JobConf configuration = new JobConf();
  setupDriver(configuration);

  DBInputFormat<NullDBWritable> format = new DBInputFormat<NullDBWritable>();
  format.setConf(configuration);
  DBInputFormat.DBInputSplit splitter = new DBInputFormat.DBInputSplit(1, 10);
  Reporter reporter = mock(Reporter.class);
  RecordReader<LongWritable, NullDBWritable> reader = format.getRecordReader(
      splitter, configuration, reporter);

  configuration.setInt(MRJobConfig.NUM_MAPS, 3);
  InputSplit[] lSplits = format.getSplits(configuration, 3);
  assertEquals(5, lSplits[0].getLength());
  assertEquals(3, lSplits.length);

  // Test the reader: a few simple sanity checks.
  assertEquals(LongWritable.class, reader.createKey().getClass());
  assertEquals(0, reader.getPos());
  assertEquals(0, reader.getProgress(), 0.001);
  reader.close();
}
Project: hops    File: TestDBInputFormat.java
/**
 * Test DBRecordReader. The reader should create keys and values and track its position.
 */
@SuppressWarnings("unchecked")
@Test (timeout = 5000)
public void testDBRecordReader() throws Exception {

  JobConf job = mock(JobConf.class);
  DBConfiguration dbConfig = mock(DBConfiguration.class);
  String[] fields = { "field1", "field2" };

  @SuppressWarnings("rawtypes")
  DBRecordReader reader = new DBInputFormat<NullDBWritable>().new DBRecordReader(
      new DBInputSplit(),  NullDBWritable.class, job,
      DriverForTest.getConnection(), dbConfig, "condition", fields, "table");
  LongWritable key = reader.createKey();
  assertEquals(0, key.get());
  DBWritable value = reader.createValue();
  assertEquals(
      "org.apache.hadoop.mapred.lib.db.DBInputFormat$NullDBWritable", value
          .getClass().getName());
  assertEquals(0, reader.getPos());
  assertFalse(reader.next(key, value));

}
Project: hadoop-on-lustre2    File: TestDBInputFormat.java
/**
 * Test the DBInputFormat class. The class should split the result into chunks.
 * @throws Exception
 */
@Test(timeout = 10000)
public void testDBInputFormat() throws Exception {
  JobConf configuration = new JobConf();
  setupDriver(configuration);

  DBInputFormat<NullDBWritable> format = new DBInputFormat<NullDBWritable>();
  format.setConf(configuration);
  DBInputFormat.DBInputSplit splitter = new DBInputFormat.DBInputSplit(1, 10);
  Reporter reporter = mock(Reporter.class);
  RecordReader<LongWritable, NullDBWritable> reader = format.getRecordReader(
      splitter, configuration, reporter);

  configuration.setInt(MRJobConfig.NUM_MAPS, 3);
  InputSplit[] lSplits = format.getSplits(configuration, 3);
  assertEquals(5, lSplits[0].getLength());
  assertEquals(3, lSplits.length);

  // Test the reader: a few simple sanity checks.
  assertEquals(LongWritable.class, reader.createKey().getClass());
  assertEquals(0, reader.getPos());
  assertEquals(0, reader.getProgress(), 0.001);
  reader.close();
}
Project: hadoop-on-lustre2    File: TestDBInputFormat.java
/**
 * Test DBRecordReader. The reader should create keys and values and track its position.
 */
@SuppressWarnings("unchecked")
@Test (timeout = 5000)
public void testDBRecordReader() throws Exception {

  JobConf job = mock(JobConf.class);
  DBConfiguration dbConfig = mock(DBConfiguration.class);
  String[] fields = { "field1", "field2" };

  @SuppressWarnings("rawtypes")
  DBRecordReader reader = new DBInputFormat<NullDBWritable>().new DBRecordReader(
      new DBInputSplit(),  NullDBWritable.class, job,
      DriverForTest.getConnection(), dbConfig, "condition", fields, "table");
  LongWritable key = reader.createKey();
  assertEquals(0, key.get());
  DBWritable value = reader.createValue();
  assertEquals(
      "org.apache.hadoop.mapred.lib.db.DBInputFormat$NullDBWritable", value
          .getClass().getName());
  assertEquals(0, reader.getPos());
  assertFalse(reader.next(key, value));

}
Project: aliyun-oss-hadoop-fs    File: TestDBInputFormat.java
/**
 * Test the DB configuration: the DBConfiguration.* parameters should take effect.
 */
@Test (timeout = 5000)
public void testSetInput() {
  JobConf configuration = new JobConf();

  String[] fieldNames = { "field1", "field2" };
  DBInputFormat.setInput(configuration, NullDBWritable.class, "table",
      "conditions", "orderBy", fieldNames);
  assertEquals(
      "org.apache.hadoop.mapred.lib.db.DBInputFormat$NullDBWritable",
      configuration.getClass(DBConfiguration.INPUT_CLASS_PROPERTY, null)
          .getName());
  assertEquals("table",
      configuration.get(DBConfiguration.INPUT_TABLE_NAME_PROPERTY, null));

  String[] fields = configuration
      .getStrings(DBConfiguration.INPUT_FIELD_NAMES_PROPERTY);
  assertEquals("field1", fields[0]);
  assertEquals("field2", fields[1]);

  assertEquals("conditions",
      configuration.get(DBConfiguration.INPUT_CONDITIONS_PROPERTY, null));
  assertEquals("orderBy",
      configuration.get(DBConfiguration.INPUT_ORDER_BY_PROPERTY, null));

  configuration = new JobConf();

  DBInputFormat.setInput(configuration, NullDBWritable.class, "query",
      "countQuery");
  assertEquals("query", configuration.get(DBConfiguration.INPUT_QUERY, null));
  assertEquals("countQuery",
      configuration.get(DBConfiguration.INPUT_COUNT_QUERY, null));

  JobConf jConfiguration = new JobConf();
  DBConfiguration.configureDB(jConfiguration, "driverClass", "dbUrl", "user",
      "password");
  assertEquals("driverClass",
      jConfiguration.get(DBConfiguration.DRIVER_CLASS_PROPERTY));
  assertEquals("dbUrl", jConfiguration.get(DBConfiguration.URL_PROPERTY));
  assertEquals("user", jConfiguration.get(DBConfiguration.USERNAME_PROPERTY));
  assertEquals("password",
      jConfiguration.get(DBConfiguration.PASSWORD_PROPERTY));
  jConfiguration = new JobConf();
  DBConfiguration.configureDB(jConfiguration, "driverClass", "dbUrl");
  assertEquals("driverClass",
      jConfiguration.get(DBConfiguration.DRIVER_CLASS_PROPERTY));
  assertEquals("dbUrl", jConfiguration.get(DBConfiguration.URL_PROPERTY));
  assertNull(jConfiguration.get(DBConfiguration.USERNAME_PROPERTY));
  assertNull(jConfiguration.get(DBConfiguration.PASSWORD_PROPERTY));
}
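Taken together, these assertions cover the two halves of a real job setup: DBConfiguration.configureDB declares the driver and connection, and DBInputFormat.setInput declares what to read. A minimal sketch of that wiring, with NullDBWritable as the value class (the driver class, URL, credentials, table, and field names below are illustrative placeholders, not values from this page):

// Hypothetical job wiring for DBInputFormat; all string literals are placeholders.
JobConf job = new JobConf();
DBConfiguration.configureDB(job, "com.mysql.jdbc.Driver",
    "jdbc:mysql://localhost/testdb", "user", "password");
DBInputFormat.setInput(job, NullDBWritable.class, "employees",
    null /* conditions */, "id" /* orderBy */, "id", "name");
job.setInputFormat(DBInputFormat.class);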
Project: big-c    File: TestDBInputFormat.java
/**
 * Test the DB configuration: the DBConfiguration.* parameters should take effect.
 */
@Test (timeout = 5000)
public void testSetInput() {
  JobConf configuration = new JobConf();

  String[] fieldNames = { "field1", "field2" };
  DBInputFormat.setInput(configuration, NullDBWritable.class, "table",
      "conditions", "orderBy", fieldNames);
  assertEquals(
      "org.apache.hadoop.mapred.lib.db.DBInputFormat$NullDBWritable",
      configuration.getClass(DBConfiguration.INPUT_CLASS_PROPERTY, null)
          .getName());
  assertEquals("table",
      configuration.get(DBConfiguration.INPUT_TABLE_NAME_PROPERTY, null));

  String[] fields = configuration
      .getStrings(DBConfiguration.INPUT_FIELD_NAMES_PROPERTY);
  assertEquals("field1", fields[0]);
  assertEquals("field2", fields[1]);

  assertEquals("conditions",
      configuration.get(DBConfiguration.INPUT_CONDITIONS_PROPERTY, null));
  assertEquals("orderBy",
      configuration.get(DBConfiguration.INPUT_ORDER_BY_PROPERTY, null));

  configuration = new JobConf();

  DBInputFormat.setInput(configuration, NullDBWritable.class, "query",
      "countQuery");
  assertEquals("query", configuration.get(DBConfiguration.INPUT_QUERY, null));
  assertEquals("countQuery",
      configuration.get(DBConfiguration.INPUT_COUNT_QUERY, null));

  JobConf jConfiguration = new JobConf();
  DBConfiguration.configureDB(jConfiguration, "driverClass", "dbUrl", "user",
      "password");
  assertEquals("driverClass",
      jConfiguration.get(DBConfiguration.DRIVER_CLASS_PROPERTY));
  assertEquals("dbUrl", jConfiguration.get(DBConfiguration.URL_PROPERTY));
  assertEquals("user", jConfiguration.get(DBConfiguration.USERNAME_PROPERTY));
  assertEquals("password",
      jConfiguration.get(DBConfiguration.PASSWORD_PROPERTY));
  jConfiguration = new JobConf();
  DBConfiguration.configureDB(jConfiguration, "driverClass", "dbUrl");
  assertEquals("driverClass",
      jConfiguration.get(DBConfiguration.DRIVER_CLASS_PROPERTY));
  assertEquals("dbUrl", jConfiguration.get(DBConfiguration.URL_PROPERTY));
  assertNull(jConfiguration.get(DBConfiguration.USERNAME_PROPERTY));
  assertNull(jConfiguration.get(DBConfiguration.PASSWORD_PROPERTY));
}
Project: hadoop-2.6.0-cdh5.4.3    File: TestDBInputFormat.java
/**
 * Test the DB configuration: the DBConfiguration.* parameters should take effect.
 */
@Test (timeout = 5000)
public void testSetInput() {
  JobConf configuration = new JobConf();

  String[] fieldNames = { "field1", "field2" };
  DBInputFormat.setInput(configuration, NullDBWritable.class, "table",
      "conditions", "orderBy", fieldNames);
  assertEquals(
      "org.apache.hadoop.mapred.lib.db.DBInputFormat$NullDBWritable",
      configuration.getClass(DBConfiguration.INPUT_CLASS_PROPERTY, null)
          .getName());
  assertEquals("table",
      configuration.get(DBConfiguration.INPUT_TABLE_NAME_PROPERTY, null));

  String[] fields = configuration
      .getStrings(DBConfiguration.INPUT_FIELD_NAMES_PROPERTY);
  assertEquals("field1", fields[0]);
  assertEquals("field2", fields[1]);

  assertEquals("conditions",
      configuration.get(DBConfiguration.INPUT_CONDITIONS_PROPERTY, null));
  assertEquals("orderBy",
      configuration.get(DBConfiguration.INPUT_ORDER_BY_PROPERTY, null));

  configuration = new JobConf();

  DBInputFormat.setInput(configuration, NullDBWritable.class, "query",
      "countQuery");
  assertEquals("query", configuration.get(DBConfiguration.INPUT_QUERY, null));
  assertEquals("countQuery",
      configuration.get(DBConfiguration.INPUT_COUNT_QUERY, null));

  JobConf jConfiguration = new JobConf();
  DBConfiguration.configureDB(jConfiguration, "driverClass", "dbUrl", "user",
      "password");
  assertEquals("driverClass",
      jConfiguration.get(DBConfiguration.DRIVER_CLASS_PROPERTY));
  assertEquals("dbUrl", jConfiguration.get(DBConfiguration.URL_PROPERTY));
  assertEquals("user", jConfiguration.get(DBConfiguration.USERNAME_PROPERTY));
  assertEquals("password",
      jConfiguration.get(DBConfiguration.PASSWORD_PROPERTY));
  jConfiguration = new JobConf();
  DBConfiguration.configureDB(jConfiguration, "driverClass", "dbUrl");
  assertEquals("driverClass",
      jConfiguration.get(DBConfiguration.DRIVER_CLASS_PROPERTY));
  assertEquals("dbUrl", jConfiguration.get(DBConfiguration.URL_PROPERTY));
  assertNull(jConfiguration.get(DBConfiguration.USERNAME_PROPERTY));
  assertNull(jConfiguration.get(DBConfiguration.PASSWORD_PROPERTY));
}
Project: hadoop-EAR    File: DBConfiguration.java
Class<?> getInputClass() {
  return job.getClass(DBConfiguration.INPUT_CLASS_PROPERTY, NullDBWritable.class);
}
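getInputClass() falls back to NullDBWritable whenever DBConfiguration.INPUT_CLASS_PROPERTY has not been set, so DBInputFormat always has a usable value class. For reference, NullDBWritable is a no-op implementation of both DBWritable and Writable; a sketch of its shape, close to the Hadoop source (parameter names are approximate):

// NullDBWritable: a do-nothing default value class for DBInputFormat.
public static class NullDBWritable implements DBWritable, Writable {
  @Override public void readFields(DataInput in) throws IOException { }
  @Override public void readFields(ResultSet resultSet) throws SQLException { }
  @Override public void write(DataOutput out) throws IOException { }
  @Override public void write(PreparedStatement statement) throws SQLException { }
}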
Project: FlexMap    File: TestDBInputFormat.java
/**
 * Test the DB configuration: the DBConfiguration.* parameters should take effect.
 */
@Test (timeout = 5000)
public void testSetInput() {
  JobConf configuration = new JobConf();

  String[] fieldNames = { "field1", "field2" };
  DBInputFormat.setInput(configuration, NullDBWritable.class, "table",
      "conditions", "orderBy", fieldNames);
  assertEquals(
      "org.apache.hadoop.mapred.lib.db.DBInputFormat$NullDBWritable",
      configuration.getClass(DBConfiguration.INPUT_CLASS_PROPERTY, null)
          .getName());
  assertEquals("table",
      configuration.get(DBConfiguration.INPUT_TABLE_NAME_PROPERTY, null));

  String[] fields = configuration
      .getStrings(DBConfiguration.INPUT_FIELD_NAMES_PROPERTY);
  assertEquals("field1", fields[0]);
  assertEquals("field2", fields[1]);

  assertEquals("conditions",
      configuration.get(DBConfiguration.INPUT_CONDITIONS_PROPERTY, null));
  assertEquals("orderBy",
      configuration.get(DBConfiguration.INPUT_ORDER_BY_PROPERTY, null));

  configuration = new JobConf();

  DBInputFormat.setInput(configuration, NullDBWritable.class, "query",
      "countQuery");
  assertEquals("query", configuration.get(DBConfiguration.INPUT_QUERY, null));
  assertEquals("countQuery",
      configuration.get(DBConfiguration.INPUT_COUNT_QUERY, null));

  JobConf jConfiguration = new JobConf();
  DBConfiguration.configureDB(jConfiguration, "driverClass", "dbUrl", "user",
      "password");
  assertEquals("driverClass",
      jConfiguration.get(DBConfiguration.DRIVER_CLASS_PROPERTY));
  assertEquals("dbUrl", jConfiguration.get(DBConfiguration.URL_PROPERTY));
  assertEquals("user", jConfiguration.get(DBConfiguration.USERNAME_PROPERTY));
  assertEquals("password",
      jConfiguration.get(DBConfiguration.PASSWORD_PROPERTY));
  jConfiguration = new JobConf();
  DBConfiguration.configureDB(jConfiguration, "driverClass", "dbUrl");
  assertEquals("driverClass",
      jConfiguration.get(DBConfiguration.DRIVER_CLASS_PROPERTY));
  assertEquals("dbUrl", jConfiguration.get(DBConfiguration.URL_PROPERTY));
  assertNull(jConfiguration.get(DBConfiguration.USERNAME_PROPERTY));
  assertNull(jConfiguration.get(DBConfiguration.PASSWORD_PROPERTY));
}
Project: hops    File: TestDBInputFormat.java
/**
 * Test the DB configuration: the DBConfiguration.* parameters should take effect.
 */
@Test (timeout = 5000)
public void testSetInput() {
  JobConf configuration = new JobConf();

  String[] fieldNames = { "field1", "field2" };
  DBInputFormat.setInput(configuration, NullDBWritable.class, "table",
      "conditions", "orderBy", fieldNames);
  assertEquals(
      "org.apache.hadoop.mapred.lib.db.DBInputFormat$NullDBWritable",
      configuration.getClass(DBConfiguration.INPUT_CLASS_PROPERTY, null)
          .getName());
  assertEquals("table",
      configuration.get(DBConfiguration.INPUT_TABLE_NAME_PROPERTY, null));

  String[] fields = configuration
      .getStrings(DBConfiguration.INPUT_FIELD_NAMES_PROPERTY);
  assertEquals("field1", fields[0]);
  assertEquals("field2", fields[1]);

  assertEquals("conditions",
      configuration.get(DBConfiguration.INPUT_CONDITIONS_PROPERTY, null));
  assertEquals("orderBy",
      configuration.get(DBConfiguration.INPUT_ORDER_BY_PROPERTY, null));

  configuration = new JobConf();

  DBInputFormat.setInput(configuration, NullDBWritable.class, "query",
      "countQuery");
  assertEquals("query", configuration.get(DBConfiguration.INPUT_QUERY, null));
  assertEquals("countQuery",
      configuration.get(DBConfiguration.INPUT_COUNT_QUERY, null));

  JobConf jConfiguration = new JobConf();
  DBConfiguration.configureDB(jConfiguration, "driverClass", "dbUrl", "user",
      "password");
  assertEquals("driverClass",
      jConfiguration.get(DBConfiguration.DRIVER_CLASS_PROPERTY));
  assertEquals("dbUrl", jConfiguration.get(DBConfiguration.URL_PROPERTY));
  assertEquals("user", jConfiguration.get(DBConfiguration.USERNAME_PROPERTY));
  assertEquals("password",
      jConfiguration.get(DBConfiguration.PASSWORD_PROPERTY));
  jConfiguration = new JobConf();
  DBConfiguration.configureDB(jConfiguration, "driverClass", "dbUrl");
  assertEquals("driverClass",
      jConfiguration.get(DBConfiguration.DRIVER_CLASS_PROPERTY));
  assertEquals("dbUrl", jConfiguration.get(DBConfiguration.URL_PROPERTY));
  assertNull(jConfiguration.get(DBConfiguration.USERNAME_PROPERTY));
  assertNull(jConfiguration.get(DBConfiguration.PASSWORD_PROPERTY));
}
Project: hadoop-on-lustre    File: DBConfiguration.java
Class<?> getInputClass() {
  return job.getClass(DBConfiguration.INPUT_CLASS_PROPERTY, NullDBWritable.class);
}
Project: hadoop-on-lustre2    File: TestDBInputFormat.java
/**
 * Test the DB configuration: the DBConfiguration.* parameters should take effect.
 */
@Test (timeout = 5000)
public void testSetInput() {
  JobConf configuration = new JobConf();

  String[] fieldNames = { "field1", "field2" };
  DBInputFormat.setInput(configuration, NullDBWritable.class, "table",
      "conditions", "orderBy", fieldNames);
  assertEquals(
      "org.apache.hadoop.mapred.lib.db.DBInputFormat$NullDBWritable",
      configuration.getClass(DBConfiguration.INPUT_CLASS_PROPERTY, null)
          .getName());
  assertEquals("table",
      configuration.get(DBConfiguration.INPUT_TABLE_NAME_PROPERTY, null));

  String[] fields = configuration
      .getStrings(DBConfiguration.INPUT_FIELD_NAMES_PROPERTY);
  assertEquals("field1", fields[0]);
  assertEquals("field2", fields[1]);

  assertEquals("conditions",
      configuration.get(DBConfiguration.INPUT_CONDITIONS_PROPERTY, null));
  assertEquals("orderBy",
      configuration.get(DBConfiguration.INPUT_ORDER_BY_PROPERTY, null));

  configuration = new JobConf();

  DBInputFormat.setInput(configuration, NullDBWritable.class, "query",
      "countQuery");
  assertEquals("query", configuration.get(DBConfiguration.INPUT_QUERY, null));
  assertEquals("countQuery",
      configuration.get(DBConfiguration.INPUT_COUNT_QUERY, null));

  JobConf jConfiguration = new JobConf();
  DBConfiguration.configureDB(jConfiguration, "driverClass", "dbUrl", "user",
      "password");
  assertEquals("driverClass",
      jConfiguration.get(DBConfiguration.DRIVER_CLASS_PROPERTY));
  assertEquals("dbUrl", jConfiguration.get(DBConfiguration.URL_PROPERTY));
  assertEquals("user", jConfiguration.get(DBConfiguration.USERNAME_PROPERTY));
  assertEquals("password",
      jConfiguration.get(DBConfiguration.PASSWORD_PROPERTY));
  jConfiguration = new JobConf();
  DBConfiguration.configureDB(jConfiguration, "driverClass", "dbUrl");
  assertEquals("driverClass",
      jConfiguration.get(DBConfiguration.DRIVER_CLASS_PROPERTY));
  assertEquals("dbUrl", jConfiguration.get(DBConfiguration.URL_PROPERTY));
  assertNull(jConfiguration.get(DBConfiguration.USERNAME_PROPERTY));
  assertNull(jConfiguration.get(DBConfiguration.PASSWORD_PROPERTY));
}
Project: RDFS    File: DBConfiguration.java
Class<?> getInputClass() {
  return job.getClass(DBConfiguration.INPUT_CLASS_PROPERTY, NullDBWritable.class);
}
Project: hadoop-0.20    File: DBConfiguration.java
Class<?> getInputClass() {
  return job.getClass(DBConfiguration.INPUT_CLASS_PROPERTY, NullDBWritable.class);
}
Project: hortonworks-extension    File: DBConfiguration.java
Class<?> getInputClass() {
  return job.getClass(DBConfiguration.INPUT_CLASS_PROPERTY, NullDBWritable.class);
}
Project: hortonworks-extension    File: DBConfiguration.java
Class<?> getInputClass() {
  return job.getClass(DBConfiguration.INPUT_CLASS_PROPERTY, NullDBWritable.class);
}
Project: hadoop-gpu    File: DBConfiguration.java
Class<?> getInputClass() {
  return job.getClass(DBConfiguration.INPUT_CLASS_PROPERTY, NullDBWritable.class);
}