Java 类org.apache.hadoop.mapred.lib.db.DBConfiguration 实例源码

项目:hadoop    文件:TestDBInputFormat.java   
/**
 * Tests DBRecordReader construction: a fresh reader must produce a
 * zero-valued key, a NullDBWritable value, start at position 0, and
 * report no records from the test connection.
 */
@SuppressWarnings("unchecked")
@Test (timeout = 5000)
public void testDBRecordReader() throws Exception {

  JobConf job = mock(JobConf.class);
  DBConfiguration dbConfig = mock(DBConfiguration.class);
  // Fixed fixture typo ("filed2" -> "field2"); the names only flow into the
  // reader's query string against the mocked connection.
  String[] fields = { "field1", "field2" };

  @SuppressWarnings("rawtypes")
  DBRecordReader reader = new DBInputFormat<NullDBWritable>().new DBRecordReader(
      new DBInputSplit(),  NullDBWritable.class, job,
      DriverForTest.getConnection(), dbConfig, "condition", fields, "table");
  LongWritable key = reader.createKey();
  assertEquals(0, key.get());
  DBWritable value = reader.createValue();
  assertEquals(
      "org.apache.hadoop.mapred.lib.db.DBInputFormat$NullDBWritable", value
          .getClass().getName());
  // A new reader has consumed no input yet.
  assertEquals(0, reader.getPos());
  // The test driver yields an empty result set, so next() reports EOF.
  assertFalse(reader.next(key, value));

}
项目:aliyun-oss-hadoop-fs    文件:TestDBInputFormat.java   
/**
 * Tests DBRecordReader construction: a fresh reader must produce a
 * zero-valued key, a NullDBWritable value, start at position 0, and
 * report no records from the test connection.
 */
@SuppressWarnings("unchecked")
@Test (timeout = 5000)
public void testDBRecordReader() throws Exception {

  JobConf job = mock(JobConf.class);
  DBConfiguration dbConfig = mock(DBConfiguration.class);
  // Fixed fixture typo ("filed2" -> "field2"); the names only flow into the
  // reader's query string against the mocked connection.
  String[] fields = { "field1", "field2" };

  @SuppressWarnings("rawtypes")
  DBRecordReader reader = new DBInputFormat<NullDBWritable>().new DBRecordReader(
      new DBInputSplit(),  NullDBWritable.class, job,
      DriverForTest.getConnection(), dbConfig, "condition", fields, "table");
  LongWritable key = reader.createKey();
  assertEquals(0, key.get());
  DBWritable value = reader.createValue();
  assertEquals(
      "org.apache.hadoop.mapred.lib.db.DBInputFormat$NullDBWritable", value
          .getClass().getName());
  // A new reader has consumed no input yet.
  assertEquals(0, reader.getPos());
  // The test driver yields an empty result set, so next() reports EOF.
  assertFalse(reader.next(key, value));

}
项目:big-c    文件:TestDBInputFormat.java   
/**
 * Tests DBRecordReader construction: a fresh reader must produce a
 * zero-valued key, a NullDBWritable value, start at position 0, and
 * report no records from the test connection.
 */
@SuppressWarnings("unchecked")
@Test (timeout = 5000)
public void testDBRecordReader() throws Exception {

  JobConf job = mock(JobConf.class);
  DBConfiguration dbConfig = mock(DBConfiguration.class);
  // Fixed fixture typo ("filed2" -> "field2"); the names only flow into the
  // reader's query string against the mocked connection.
  String[] fields = { "field1", "field2" };

  @SuppressWarnings("rawtypes")
  DBRecordReader reader = new DBInputFormat<NullDBWritable>().new DBRecordReader(
      new DBInputSplit(),  NullDBWritable.class, job,
      DriverForTest.getConnection(), dbConfig, "condition", fields, "table");
  LongWritable key = reader.createKey();
  assertEquals(0, key.get());
  DBWritable value = reader.createValue();
  assertEquals(
      "org.apache.hadoop.mapred.lib.db.DBInputFormat$NullDBWritable", value
          .getClass().getName());
  // A new reader has consumed no input yet.
  assertEquals(0, reader.getPos());
  // The test driver yields an empty result set, so next() reports EOF.
  assertFalse(reader.next(key, value));

}
项目:hadoop-2.6.0-cdh5.4.3    文件:TestDBInputFormat.java   
/**
 * Tests DBRecordReader construction: a fresh reader must produce a
 * zero-valued key, a NullDBWritable value, start at position 0, and
 * report no records from the test connection.
 */
@SuppressWarnings("unchecked")
@Test (timeout = 5000)
public void testDBRecordReader() throws Exception {

  JobConf job = mock(JobConf.class);
  DBConfiguration dbConfig = mock(DBConfiguration.class);
  // Fixed fixture typo ("filed2" -> "field2"); the names only flow into the
  // reader's query string against the mocked connection.
  String[] fields = { "field1", "field2" };

  @SuppressWarnings("rawtypes")
  DBRecordReader reader = new DBInputFormat<NullDBWritable>().new DBRecordReader(
      new DBInputSplit(),  NullDBWritable.class, job,
      DriverForTest.getConnection(), dbConfig, "condition", fields, "table");
  LongWritable key = reader.createKey();
  assertEquals(0, key.get());
  DBWritable value = reader.createValue();
  assertEquals(
      "org.apache.hadoop.mapred.lib.db.DBInputFormat$NullDBWritable", value
          .getClass().getName());
  // A new reader has consumed no input yet.
  assertEquals(0, reader.getPos());
  // The test driver yields an empty result set, so next() reports EOF.
  assertFalse(reader.next(key, value));

}
项目:FlexMap    文件:TestDBInputFormat.java   
/**
 * Tests DBRecordReader construction: a fresh reader must produce a
 * zero-valued key, a NullDBWritable value, start at position 0, and
 * report no records from the test connection.
 */
@SuppressWarnings("unchecked")
@Test (timeout = 5000)
public void testDBRecordReader() throws Exception {

  JobConf job = mock(JobConf.class);
  DBConfiguration dbConfig = mock(DBConfiguration.class);
  // Fixed fixture typo ("filed2" -> "field2"); the names only flow into the
  // reader's query string against the mocked connection.
  String[] fields = { "field1", "field2" };

  @SuppressWarnings("rawtypes")
  DBRecordReader reader = new DBInputFormat<NullDBWritable>().new DBRecordReader(
      new DBInputSplit(),  NullDBWritable.class, job,
      DriverForTest.getConnection(), dbConfig, "condition", fields, "table");
  LongWritable key = reader.createKey();
  assertEquals(0, key.get());
  DBWritable value = reader.createValue();
  assertEquals(
      "org.apache.hadoop.mapred.lib.db.DBInputFormat$NullDBWritable", value
          .getClass().getName());
  // A new reader has consumed no input yet.
  assertEquals(0, reader.getPos());
  // The test driver yields an empty result set, so next() reports EOF.
  assertFalse(reader.next(key, value));

}
项目:hops    文件:TestDBInputFormat.java   
/**
 * Tests DBRecordReader construction: a fresh reader must produce a
 * zero-valued key, a NullDBWritable value, start at position 0, and
 * report no records from the test connection.
 */
@SuppressWarnings("unchecked")
@Test (timeout = 5000)
public void testDBRecordReader() throws Exception {

  JobConf job = mock(JobConf.class);
  DBConfiguration dbConfig = mock(DBConfiguration.class);
  // Fixed fixture typo ("filed2" -> "field2"); the names only flow into the
  // reader's query string against the mocked connection.
  String[] fields = { "field1", "field2" };

  @SuppressWarnings("rawtypes")
  DBRecordReader reader = new DBInputFormat<NullDBWritable>().new DBRecordReader(
      new DBInputSplit(),  NullDBWritable.class, job,
      DriverForTest.getConnection(), dbConfig, "condition", fields, "table");
  LongWritable key = reader.createKey();
  assertEquals(0, key.get());
  DBWritable value = reader.createValue();
  assertEquals(
      "org.apache.hadoop.mapred.lib.db.DBInputFormat$NullDBWritable", value
          .getClass().getName());
  // A new reader has consumed no input yet.
  assertEquals(0, reader.getPos());
  // The test driver yields an empty result set, so next() reports EOF.
  assertFalse(reader.next(key, value));

}
项目:hadoop-on-lustre2    文件:TestDBInputFormat.java   
/**
 * Tests DBRecordReader construction: a fresh reader must produce a
 * zero-valued key, a NullDBWritable value, start at position 0, and
 * report no records from the test connection.
 */
@SuppressWarnings("unchecked")
@Test (timeout = 5000)
public void testDBRecordReader() throws Exception {

  JobConf job = mock(JobConf.class);
  DBConfiguration dbConfig = mock(DBConfiguration.class);
  // Fixed fixture typo ("filed2" -> "field2"); the names only flow into the
  // reader's query string against the mocked connection.
  String[] fields = { "field1", "field2" };

  @SuppressWarnings("rawtypes")
  DBRecordReader reader = new DBInputFormat<NullDBWritable>().new DBRecordReader(
      new DBInputSplit(),  NullDBWritable.class, job,
      DriverForTest.getConnection(), dbConfig, "condition", fields, "table");
  LongWritable key = reader.createKey();
  assertEquals(0, key.get());
  DBWritable value = reader.createValue();
  assertEquals(
      "org.apache.hadoop.mapred.lib.db.DBInputFormat$NullDBWritable", value
          .getClass().getName());
  // A new reader has consumed no input yet.
  assertEquals(0, reader.getPos());
  // The test driver yields an empty result set, so next() reports EOF.
  assertFalse(reader.next(key, value));

}
项目:HiveJdbcStorageHandler    文件:JdbcStorageHandler.java   
/**
 * Copies JDBC-related table properties into the job configuration so the
 * DB input/output formats can locate the table, its columns, and the
 * record class at job-execution time.
 *
 * @param tableDesc     Hive table descriptor for the table being read/written
 * @param jobProperties mutable map of job properties to populate
 */
private void configureJobProperties(TableDesc tableDesc, Map<String, String> jobProperties) {
    if(LOG.isDebugEnabled()) {
        // Fixed typo in the log message ("tabelDesc" -> "tableDesc").
        LOG.debug("tableDesc: " + tableDesc);
        LOG.debug("jobProperties: " + jobProperties);
    }

    String tblName = tableDesc.getTableName();
    Properties tblProps = tableDesc.getProperties();
    // NOTE(review): assumes LIST_COLUMNS is always present; a null here would
    // be stored as the field-names property value -- confirm against callers.
    String columnNames = tblProps.getProperty(Constants.LIST_COLUMNS);
    jobProperties.put(DBConfiguration.INPUT_CLASS_PROPERTY, DbRecordWritable.class.getName());
    jobProperties.put(DBConfiguration.INPUT_TABLE_NAME_PROPERTY, tblName);
    jobProperties.put(DBConfiguration.OUTPUT_TABLE_NAME_PROPERTY, tblName);
    jobProperties.put(DBConfiguration.INPUT_FIELD_NAMES_PROPERTY, columnNames);
    jobProperties.put(DBConfiguration.OUTPUT_FIELD_NAMES_PROPERTY, columnNames);

    // Forward any user-supplied "mapred.jdbc.*" overrides verbatim.
    for(String key : tblProps.stringPropertyNames()) {
        if(key.startsWith("mapred.jdbc.")) {
            String value = tblProps.getProperty(key);
            jobProperties.put(key, value);
        }
    }
}
项目:hadoop    文件:TestDBInputFormat.java   
/**
 * Points the given job configuration at the in-memory test JDBC driver
 * and registers that driver with the JDBC DriverManager.
 */
private void setupDriver(JobConf configuration) throws Exception {
  DriverForTest driver = new DriverForTest();
  DriverManager.registerDriver(driver);
  configuration.set(DBConfiguration.DRIVER_CLASS_PROPERTY,
      DriverForTest.class.getCanonicalName());
  configuration.set(DBConfiguration.URL_PROPERTY, "testUrl");
}
项目:aliyun-oss-hadoop-fs    文件:TestDBInputFormat.java   
/**
 * Verifies that DBInputFormat.setInput and DBConfiguration.configureDB
 * store every DBConfiguration.* parameter in the job configuration.
 */
@Test (timeout = 5000)
public void testSetInput() {
  JobConf conf = new JobConf();

  String[] columns = { "field1", "field2" };
  DBInputFormat.setInput(conf, NullDBWritable.class, "table",
      "conditions", "orderBy", columns);

  // Table-based input: class, table name, fields, conditions, ordering.
  assertEquals(
      "org.apache.hadoop.mapred.lib.db.DBInputFormat$NullDBWritable",
      conf.getClass(DBConfiguration.INPUT_CLASS_PROPERTY, null).getName());
  assertEquals("table",
      conf.get(DBConfiguration.INPUT_TABLE_NAME_PROPERTY, null));

  String[] stored = conf.getStrings(DBConfiguration.INPUT_FIELD_NAMES_PROPERTY);
  assertEquals("field1", stored[0]);
  assertEquals("field2", stored[1]);

  assertEquals("conditions",
      conf.get(DBConfiguration.INPUT_CONDITIONS_PROPERTY, null));
  assertEquals("orderBy",
      conf.get(DBConfiguration.INPUT_ORDER_BY_PROPERTY, null));

  // Query-based input: explicit select and count queries.
  conf = new JobConf();
  DBInputFormat.setInput(conf, NullDBWritable.class, "query", "countQuery");
  assertEquals("query", conf.get(DBConfiguration.INPUT_QUERY, null));
  assertEquals("countQuery", conf.get(DBConfiguration.INPUT_COUNT_QUERY, null));

  // configureDB with credentials stores all four connection properties.
  JobConf dbConf = new JobConf();
  DBConfiguration.configureDB(dbConf, "driverClass", "dbUrl", "user",
      "password");
  assertEquals("driverClass",
      dbConf.get(DBConfiguration.DRIVER_CLASS_PROPERTY));
  assertEquals("dbUrl", dbConf.get(DBConfiguration.URL_PROPERTY));
  assertEquals("user", dbConf.get(DBConfiguration.USERNAME_PROPERTY));
  assertEquals("password", dbConf.get(DBConfiguration.PASSWORD_PROPERTY));

  // configureDB without credentials leaves user/password unset.
  dbConf = new JobConf();
  DBConfiguration.configureDB(dbConf, "driverClass", "dbUrl");
  assertEquals("driverClass",
      dbConf.get(DBConfiguration.DRIVER_CLASS_PROPERTY));
  assertEquals("dbUrl", dbConf.get(DBConfiguration.URL_PROPERTY));
  assertNull(dbConf.get(DBConfiguration.USERNAME_PROPERTY));
  assertNull(dbConf.get(DBConfiguration.PASSWORD_PROPERTY));
}
项目:aliyun-oss-hadoop-fs    文件:TestDBInputFormat.java   
/**
 * Points the given job configuration at the in-memory test JDBC driver
 * and registers that driver with the JDBC DriverManager.
 */
private void setupDriver(JobConf configuration) throws Exception {
  DriverForTest driver = new DriverForTest();
  DriverManager.registerDriver(driver);
  configuration.set(DBConfiguration.DRIVER_CLASS_PROPERTY,
      DriverForTest.class.getCanonicalName());
  configuration.set(DBConfiguration.URL_PROPERTY, "testUrl");
}
项目:big-c    文件:TestDBInputFormat.java   
/**
 * Verifies that DBInputFormat.setInput and DBConfiguration.configureDB
 * store every DBConfiguration.* parameter in the job configuration.
 */
@Test (timeout = 5000)
public void testSetInput() {
  JobConf conf = new JobConf();

  String[] columns = { "field1", "field2" };
  DBInputFormat.setInput(conf, NullDBWritable.class, "table",
      "conditions", "orderBy", columns);

  // Table-based input: class, table name, fields, conditions, ordering.
  assertEquals(
      "org.apache.hadoop.mapred.lib.db.DBInputFormat$NullDBWritable",
      conf.getClass(DBConfiguration.INPUT_CLASS_PROPERTY, null).getName());
  assertEquals("table",
      conf.get(DBConfiguration.INPUT_TABLE_NAME_PROPERTY, null));

  String[] stored = conf.getStrings(DBConfiguration.INPUT_FIELD_NAMES_PROPERTY);
  assertEquals("field1", stored[0]);
  assertEquals("field2", stored[1]);

  assertEquals("conditions",
      conf.get(DBConfiguration.INPUT_CONDITIONS_PROPERTY, null));
  assertEquals("orderBy",
      conf.get(DBConfiguration.INPUT_ORDER_BY_PROPERTY, null));

  // Query-based input: explicit select and count queries.
  conf = new JobConf();
  DBInputFormat.setInput(conf, NullDBWritable.class, "query", "countQuery");
  assertEquals("query", conf.get(DBConfiguration.INPUT_QUERY, null));
  assertEquals("countQuery", conf.get(DBConfiguration.INPUT_COUNT_QUERY, null));

  // configureDB with credentials stores all four connection properties.
  JobConf dbConf = new JobConf();
  DBConfiguration.configureDB(dbConf, "driverClass", "dbUrl", "user",
      "password");
  assertEquals("driverClass",
      dbConf.get(DBConfiguration.DRIVER_CLASS_PROPERTY));
  assertEquals("dbUrl", dbConf.get(DBConfiguration.URL_PROPERTY));
  assertEquals("user", dbConf.get(DBConfiguration.USERNAME_PROPERTY));
  assertEquals("password", dbConf.get(DBConfiguration.PASSWORD_PROPERTY));

  // configureDB without credentials leaves user/password unset.
  dbConf = new JobConf();
  DBConfiguration.configureDB(dbConf, "driverClass", "dbUrl");
  assertEquals("driverClass",
      dbConf.get(DBConfiguration.DRIVER_CLASS_PROPERTY));
  assertEquals("dbUrl", dbConf.get(DBConfiguration.URL_PROPERTY));
  assertNull(dbConf.get(DBConfiguration.USERNAME_PROPERTY));
  assertNull(dbConf.get(DBConfiguration.PASSWORD_PROPERTY));
}
项目:big-c    文件:TestDBInputFormat.java   
/**
 * Points the given job configuration at the in-memory test JDBC driver
 * and registers that driver with the JDBC DriverManager.
 */
private void setupDriver(JobConf configuration) throws Exception {
  DriverForTest driver = new DriverForTest();
  DriverManager.registerDriver(driver);
  configuration.set(DBConfiguration.DRIVER_CLASS_PROPERTY,
      DriverForTest.class.getCanonicalName());
  configuration.set(DBConfiguration.URL_PROPERTY, "testUrl");
}
项目:hadoop-2.6.0-cdh5.4.3    文件:TestDBInputFormat.java   
/**
 * Verifies that DBInputFormat.setInput and DBConfiguration.configureDB
 * store every DBConfiguration.* parameter in the job configuration.
 */
@Test (timeout = 5000)
public void testSetInput() {
  JobConf conf = new JobConf();

  String[] columns = { "field1", "field2" };
  DBInputFormat.setInput(conf, NullDBWritable.class, "table",
      "conditions", "orderBy", columns);

  // Table-based input: class, table name, fields, conditions, ordering.
  assertEquals(
      "org.apache.hadoop.mapred.lib.db.DBInputFormat$NullDBWritable",
      conf.getClass(DBConfiguration.INPUT_CLASS_PROPERTY, null).getName());
  assertEquals("table",
      conf.get(DBConfiguration.INPUT_TABLE_NAME_PROPERTY, null));

  String[] stored = conf.getStrings(DBConfiguration.INPUT_FIELD_NAMES_PROPERTY);
  assertEquals("field1", stored[0]);
  assertEquals("field2", stored[1]);

  assertEquals("conditions",
      conf.get(DBConfiguration.INPUT_CONDITIONS_PROPERTY, null));
  assertEquals("orderBy",
      conf.get(DBConfiguration.INPUT_ORDER_BY_PROPERTY, null));

  // Query-based input: explicit select and count queries.
  conf = new JobConf();
  DBInputFormat.setInput(conf, NullDBWritable.class, "query", "countQuery");
  assertEquals("query", conf.get(DBConfiguration.INPUT_QUERY, null));
  assertEquals("countQuery", conf.get(DBConfiguration.INPUT_COUNT_QUERY, null));

  // configureDB with credentials stores all four connection properties.
  JobConf dbConf = new JobConf();
  DBConfiguration.configureDB(dbConf, "driverClass", "dbUrl", "user",
      "password");
  assertEquals("driverClass",
      dbConf.get(DBConfiguration.DRIVER_CLASS_PROPERTY));
  assertEquals("dbUrl", dbConf.get(DBConfiguration.URL_PROPERTY));
  assertEquals("user", dbConf.get(DBConfiguration.USERNAME_PROPERTY));
  assertEquals("password", dbConf.get(DBConfiguration.PASSWORD_PROPERTY));

  // configureDB without credentials leaves user/password unset.
  dbConf = new JobConf();
  DBConfiguration.configureDB(dbConf, "driverClass", "dbUrl");
  assertEquals("driverClass",
      dbConf.get(DBConfiguration.DRIVER_CLASS_PROPERTY));
  assertEquals("dbUrl", dbConf.get(DBConfiguration.URL_PROPERTY));
  assertNull(dbConf.get(DBConfiguration.USERNAME_PROPERTY));
  assertNull(dbConf.get(DBConfiguration.PASSWORD_PROPERTY));
}
项目:hadoop-2.6.0-cdh5.4.3    文件:TestDBInputFormat.java   
/**
 * Points the given job configuration at the in-memory test JDBC driver
 * and registers that driver with the JDBC DriverManager.
 */
private void setupDriver(JobConf configuration) throws Exception {
  DriverForTest driver = new DriverForTest();
  DriverManager.registerDriver(driver);
  configuration.set(DBConfiguration.DRIVER_CLASS_PROPERTY,
      DriverForTest.class.getCanonicalName());
  configuration.set(DBConfiguration.URL_PROPERTY, "testUrl");
}
项目:hadoop-2.6.0-cdh5.4.3    文件:DBCountPageView.java   
@Override
//Usage DBCountPageView [driverClass dburl]
public int run(String[] args) throws Exception {

  // Use the compiled-in driver/url unless both were supplied on the command line.
  String driverClassName = DRIVER_CLASS;
  String url = DB_URL;
  if (args.length > 1) {
    driverClassName = args[0];
    url = args[1];
  }

  // Seed the database with test data before launching the job.
  initialize(driverClassName, url);

  JobConf job = new JobConf(getConf(), DBCountPageView.class);
  job.setJobName("Count Pageviews of URLs");

  job.setMapperClass(PageviewMapper.class);
  job.setCombinerClass(LongSumReducer.class);
  job.setReducerClass(PageviewReducer.class);

  // Read access records from the DB and write aggregated pageviews back.
  DBConfiguration.configureDB(job, driverClassName, url);
  DBInputFormat.setInput(job, AccessRecord.class, "Access", null, "url",
      AccessFieldNames);
  DBOutputFormat.setOutput(job, "Pageview", PageviewFieldNames);

  job.setMapOutputKeyClass(Text.class);
  job.setMapOutputValueClass(LongWritable.class);
  job.setOutputKeyClass(PageviewRecord.class);
  job.setOutputValueClass(NullWritable.class);

  try {
    JobClient.runJob(job);
    if (!verify()) {
      throw new RuntimeException("Evaluation was not correct!");
    }
  } finally {
    shutdown();
  }
  return 0;
}
项目:hadoop-EAR    文件:DBCountPageView.java   
@Override
//Usage DBCountPageView [driverClass dburl]
public int run(String[] args) throws Exception {

  // Use the compiled-in driver/url unless both were supplied on the command line.
  String driverClassName = DRIVER_CLASS;
  String url = DB_URL;
  if (args.length > 1) {
    driverClassName = args[0];
    url = args[1];
  }

  // Seed the database with test data before launching the job.
  initialize(driverClassName, url);

  JobConf job = new JobConf(getConf(), DBCountPageView.class);
  job.setJobName("Count Pageviews of URLs");

  job.setMapperClass(PageviewMapper.class);
  job.setCombinerClass(LongSumReducer.class);
  job.setReducerClass(PageviewReducer.class);

  // Read access records from the DB and write aggregated pageviews back.
  DBConfiguration.configureDB(job, driverClassName, url);
  DBInputFormat.setInput(job, AccessRecord.class, "Access", null, "url",
      AccessFieldNames);
  DBOutputFormat.setOutput(job, "Pageview", PageviewFieldNames);

  job.setMapOutputKeyClass(Text.class);
  job.setMapOutputValueClass(LongWritable.class);
  job.setOutputKeyClass(PageviewRecord.class);
  job.setOutputValueClass(NullWritable.class);

  try {
    JobClient.runJob(job);
    if (!verify()) {
      throw new RuntimeException("Evaluation was not correct!");
    }
  } finally {
    shutdown();
  }
  return 0;
}
项目:hiped2    文件:DBImportMapReduce.java   
/**
 * Configures and launches the MapReduce job that imports the "stocks"
 * table from MySQL and writes the rows out as Snappy-compressed Avro.
 *
 * @param args the command-line arguments
 * @return the process exit code
 * @throws Exception if something goes wrong
 */
public int run(final String[] args) throws Exception {

  Cli cli = Cli.builder().setArgs(args).addOptions(CliCommonOpts.OutputFileOption.values()).build();
  int cliStatus = cli.runCmd();
  if (cliStatus != 0) {
    return cliStatus;
  }

  Path outputPath = new Path(cli.getArgValueAsString(CliCommonOpts.OutputFileOption.OUTPUT));

  Configuration conf = super.getConf();

  // NOTE(review): credentials are embedded in the JDBC URL; acceptable for a
  // sample, but real deployments should supply them via configuration.
  DBConfiguration.configureDB(conf, "com.mysql.jdbc.Driver",
      "jdbc:mysql://localhost/sqoop_test" +
          "?user=hip_sqoop_user&password=password");

  JobConf job = new JobConf(conf);
  job.setJarByClass(DBImportMapReduce.class);

  // Read from the database, emit Avro records, map-only job.
  job.setInputFormat(DBInputFormat.class);
  job.setOutputFormat(AvroOutputFormat.class);
  AvroJob.setOutputSchema(job, Stock.SCHEMA$);
  job.set(AvroJob.OUTPUT_CODEC, SnappyCodec.class.getName());

  job.setMapperClass(Map.class);
  job.setNumMapTasks(4);
  job.setNumReduceTasks(0);

  job.setMapOutputKeyClass(AvroWrapper.class);
  job.setMapOutputValueClass(NullWritable.class);
  job.setOutputKeyClass(AvroWrapper.class);
  job.setOutputValueClass(NullWritable.class);

  FileOutputFormat.setOutputPath(job, outputPath);

  DBInputFormat.setInput(
      job,
      StockDbWritable.class,
      "select * from stocks",
      "SELECT COUNT(id) FROM stocks");

  RunningJob runningJob = JobClient.runJob(job);
  return runningJob.isSuccessful() ? 0 : 1;
}
项目:FlexMap    文件:TestDBInputFormat.java   
/**
 * Verifies that DBInputFormat.setInput and DBConfiguration.configureDB
 * store every DBConfiguration.* parameter in the job configuration.
 */
@Test (timeout = 5000)
public void testSetInput() {
  JobConf conf = new JobConf();

  String[] columns = { "field1", "field2" };
  DBInputFormat.setInput(conf, NullDBWritable.class, "table",
      "conditions", "orderBy", columns);

  // Table-based input: class, table name, fields, conditions, ordering.
  assertEquals(
      "org.apache.hadoop.mapred.lib.db.DBInputFormat$NullDBWritable",
      conf.getClass(DBConfiguration.INPUT_CLASS_PROPERTY, null).getName());
  assertEquals("table",
      conf.get(DBConfiguration.INPUT_TABLE_NAME_PROPERTY, null));

  String[] stored = conf.getStrings(DBConfiguration.INPUT_FIELD_NAMES_PROPERTY);
  assertEquals("field1", stored[0]);
  assertEquals("field2", stored[1]);

  assertEquals("conditions",
      conf.get(DBConfiguration.INPUT_CONDITIONS_PROPERTY, null));
  assertEquals("orderBy",
      conf.get(DBConfiguration.INPUT_ORDER_BY_PROPERTY, null));

  // Query-based input: explicit select and count queries.
  conf = new JobConf();
  DBInputFormat.setInput(conf, NullDBWritable.class, "query", "countQuery");
  assertEquals("query", conf.get(DBConfiguration.INPUT_QUERY, null));
  assertEquals("countQuery", conf.get(DBConfiguration.INPUT_COUNT_QUERY, null));

  // configureDB with credentials stores all four connection properties.
  JobConf dbConf = new JobConf();
  DBConfiguration.configureDB(dbConf, "driverClass", "dbUrl", "user",
      "password");
  assertEquals("driverClass",
      dbConf.get(DBConfiguration.DRIVER_CLASS_PROPERTY));
  assertEquals("dbUrl", dbConf.get(DBConfiguration.URL_PROPERTY));
  assertEquals("user", dbConf.get(DBConfiguration.USERNAME_PROPERTY));
  assertEquals("password", dbConf.get(DBConfiguration.PASSWORD_PROPERTY));

  // configureDB without credentials leaves user/password unset.
  dbConf = new JobConf();
  DBConfiguration.configureDB(dbConf, "driverClass", "dbUrl");
  assertEquals("driverClass",
      dbConf.get(DBConfiguration.DRIVER_CLASS_PROPERTY));
  assertEquals("dbUrl", dbConf.get(DBConfiguration.URL_PROPERTY));
  assertNull(dbConf.get(DBConfiguration.USERNAME_PROPERTY));
  assertNull(dbConf.get(DBConfiguration.PASSWORD_PROPERTY));
}
项目:FlexMap    文件:TestDBInputFormat.java   
/**
 * Points the given job configuration at the in-memory test JDBC driver
 * and registers that driver with the JDBC DriverManager.
 */
private void setupDriver(JobConf configuration) throws Exception {
  DriverForTest driver = new DriverForTest();
  DriverManager.registerDriver(driver);
  configuration.set(DBConfiguration.DRIVER_CLASS_PROPERTY,
      DriverForTest.class.getCanonicalName());
  configuration.set(DBConfiguration.URL_PROPERTY, "testUrl");
}
项目:hops    文件:TestDBInputFormat.java   
/**
 * Verifies that DBInputFormat.setInput and DBConfiguration.configureDB
 * store every DBConfiguration.* parameter in the job configuration.
 */
@Test (timeout = 5000)
public void testSetInput() {
  JobConf conf = new JobConf();

  String[] columns = { "field1", "field2" };
  DBInputFormat.setInput(conf, NullDBWritable.class, "table",
      "conditions", "orderBy", columns);

  // Table-based input: class, table name, fields, conditions, ordering.
  assertEquals(
      "org.apache.hadoop.mapred.lib.db.DBInputFormat$NullDBWritable",
      conf.getClass(DBConfiguration.INPUT_CLASS_PROPERTY, null).getName());
  assertEquals("table",
      conf.get(DBConfiguration.INPUT_TABLE_NAME_PROPERTY, null));

  String[] stored = conf.getStrings(DBConfiguration.INPUT_FIELD_NAMES_PROPERTY);
  assertEquals("field1", stored[0]);
  assertEquals("field2", stored[1]);

  assertEquals("conditions",
      conf.get(DBConfiguration.INPUT_CONDITIONS_PROPERTY, null));
  assertEquals("orderBy",
      conf.get(DBConfiguration.INPUT_ORDER_BY_PROPERTY, null));

  // Query-based input: explicit select and count queries.
  conf = new JobConf();
  DBInputFormat.setInput(conf, NullDBWritable.class, "query", "countQuery");
  assertEquals("query", conf.get(DBConfiguration.INPUT_QUERY, null));
  assertEquals("countQuery", conf.get(DBConfiguration.INPUT_COUNT_QUERY, null));

  // configureDB with credentials stores all four connection properties.
  JobConf dbConf = new JobConf();
  DBConfiguration.configureDB(dbConf, "driverClass", "dbUrl", "user",
      "password");
  assertEquals("driverClass",
      dbConf.get(DBConfiguration.DRIVER_CLASS_PROPERTY));
  assertEquals("dbUrl", dbConf.get(DBConfiguration.URL_PROPERTY));
  assertEquals("user", dbConf.get(DBConfiguration.USERNAME_PROPERTY));
  assertEquals("password", dbConf.get(DBConfiguration.PASSWORD_PROPERTY));

  // configureDB without credentials leaves user/password unset.
  dbConf = new JobConf();
  DBConfiguration.configureDB(dbConf, "driverClass", "dbUrl");
  assertEquals("driverClass",
      dbConf.get(DBConfiguration.DRIVER_CLASS_PROPERTY));
  assertEquals("dbUrl", dbConf.get(DBConfiguration.URL_PROPERTY));
  assertNull(dbConf.get(DBConfiguration.USERNAME_PROPERTY));
  assertNull(dbConf.get(DBConfiguration.PASSWORD_PROPERTY));
}
项目:hops    文件:TestDBInputFormat.java   
/**
 * Points the given job configuration at the in-memory test JDBC driver
 * and registers that driver with the JDBC DriverManager.
 */
private void setupDriver(JobConf configuration) throws Exception {
  DriverForTest driver = new DriverForTest();
  DriverManager.registerDriver(driver);
  configuration.set(DBConfiguration.DRIVER_CLASS_PROPERTY,
      DriverForTest.class.getCanonicalName());
  configuration.set(DBConfiguration.URL_PROPERTY, "testUrl");
}
项目:hadoop-on-lustre    文件:DBCountPageView.java   
@Override
//Usage DBCountPageView [driverClass dburl]
public int run(String[] args) throws Exception {

  // Use the compiled-in driver/url unless both were supplied on the command line.
  String driverClassName = DRIVER_CLASS;
  String url = DB_URL;
  if (args.length > 1) {
    driverClassName = args[0];
    url = args[1];
  }

  // Seed the database with test data before launching the job.
  initialize(driverClassName, url);

  JobConf job = new JobConf(getConf(), DBCountPageView.class);
  job.setJobName("Count Pageviews of URLs");

  job.setMapperClass(PageviewMapper.class);
  job.setCombinerClass(LongSumReducer.class);
  job.setReducerClass(PageviewReducer.class);

  // Read access records from the DB and write aggregated pageviews back.
  DBConfiguration.configureDB(job, driverClassName, url);
  DBInputFormat.setInput(job, AccessRecord.class, "Access", null, "url",
      AccessFieldNames);
  DBOutputFormat.setOutput(job, "Pageview", PageviewFieldNames);

  job.setMapOutputKeyClass(Text.class);
  job.setMapOutputValueClass(LongWritable.class);
  job.setOutputKeyClass(PageviewRecord.class);
  job.setOutputValueClass(NullWritable.class);

  try {
    JobClient.runJob(job);
    if (!verify()) {
      throw new RuntimeException("Evaluation was not correct!");
    }
  } finally {
    shutdown();
  }
  return 0;
}
项目:hadoop-on-lustre2    文件:TestDBInputFormat.java   
/**
 * Verifies that DBInputFormat.setInput and DBConfiguration.configureDB
 * store every DBConfiguration.* parameter in the job configuration.
 */
@Test (timeout = 5000)
public void testSetInput() {
  JobConf conf = new JobConf();

  String[] columns = { "field1", "field2" };
  DBInputFormat.setInput(conf, NullDBWritable.class, "table",
      "conditions", "orderBy", columns);

  // Table-based input: class, table name, fields, conditions, ordering.
  assertEquals(
      "org.apache.hadoop.mapred.lib.db.DBInputFormat$NullDBWritable",
      conf.getClass(DBConfiguration.INPUT_CLASS_PROPERTY, null).getName());
  assertEquals("table",
      conf.get(DBConfiguration.INPUT_TABLE_NAME_PROPERTY, null));

  String[] stored = conf.getStrings(DBConfiguration.INPUT_FIELD_NAMES_PROPERTY);
  assertEquals("field1", stored[0]);
  assertEquals("field2", stored[1]);

  assertEquals("conditions",
      conf.get(DBConfiguration.INPUT_CONDITIONS_PROPERTY, null));
  assertEquals("orderBy",
      conf.get(DBConfiguration.INPUT_ORDER_BY_PROPERTY, null));

  // Query-based input: explicit select and count queries.
  conf = new JobConf();
  DBInputFormat.setInput(conf, NullDBWritable.class, "query", "countQuery");
  assertEquals("query", conf.get(DBConfiguration.INPUT_QUERY, null));
  assertEquals("countQuery", conf.get(DBConfiguration.INPUT_COUNT_QUERY, null));

  // configureDB with credentials stores all four connection properties.
  JobConf dbConf = new JobConf();
  DBConfiguration.configureDB(dbConf, "driverClass", "dbUrl", "user",
      "password");
  assertEquals("driverClass",
      dbConf.get(DBConfiguration.DRIVER_CLASS_PROPERTY));
  assertEquals("dbUrl", dbConf.get(DBConfiguration.URL_PROPERTY));
  assertEquals("user", dbConf.get(DBConfiguration.USERNAME_PROPERTY));
  assertEquals("password", dbConf.get(DBConfiguration.PASSWORD_PROPERTY));

  // configureDB without credentials leaves user/password unset.
  dbConf = new JobConf();
  DBConfiguration.configureDB(dbConf, "driverClass", "dbUrl");
  assertEquals("driverClass",
      dbConf.get(DBConfiguration.DRIVER_CLASS_PROPERTY));
  assertEquals("dbUrl", dbConf.get(DBConfiguration.URL_PROPERTY));
  assertNull(dbConf.get(DBConfiguration.USERNAME_PROPERTY));
  assertNull(dbConf.get(DBConfiguration.PASSWORD_PROPERTY));
}
项目:hadoop-on-lustre2    文件:TestDBInputFormat.java   
/**
 * Points the given job configuration at the in-memory test JDBC driver
 * and registers that driver with the JDBC DriverManager.
 */
private void setupDriver(JobConf configuration) throws Exception {
  DriverForTest driver = new DriverForTest();
  DriverManager.registerDriver(driver);
  configuration.set(DBConfiguration.DRIVER_CLASS_PROPERTY,
      DriverForTest.class.getCanonicalName());
  configuration.set(DBConfiguration.URL_PROPERTY, "testUrl");
}
项目:RDFS    文件:DBCountPageView.java   
@Override
//Usage DBCountPageView [driverClass dburl]
public int run(String[] args) throws Exception {

  // Use the compiled-in driver/url unless both were supplied on the command line.
  String driverClassName = DRIVER_CLASS;
  String url = DB_URL;
  if (args.length > 1) {
    driverClassName = args[0];
    url = args[1];
  }

  // Seed the database with test data before launching the job.
  initialize(driverClassName, url);

  JobConf job = new JobConf(getConf(), DBCountPageView.class);
  job.setJobName("Count Pageviews of URLs");

  job.setMapperClass(PageviewMapper.class);
  job.setCombinerClass(LongSumReducer.class);
  job.setReducerClass(PageviewReducer.class);

  // Read access records from the DB and write aggregated pageviews back.
  DBConfiguration.configureDB(job, driverClassName, url);
  DBInputFormat.setInput(job, AccessRecord.class, "Access", null, "url",
      AccessFieldNames);
  DBOutputFormat.setOutput(job, "Pageview", PageviewFieldNames);

  job.setMapOutputKeyClass(Text.class);
  job.setMapOutputValueClass(LongWritable.class);
  job.setOutputKeyClass(PageviewRecord.class);
  job.setOutputValueClass(NullWritable.class);

  try {
    JobClient.runJob(job);
    if (!verify()) {
      throw new RuntimeException("Evaluation was not correct!");
    }
  } finally {
    shutdown();
  }
  return 0;
}
项目:hadoop-0.20    文件:DBCountPageView.java   
@Override
//Usage DBCountPageView [driverClass dburl]
public int run(String[] args) throws Exception {

  // Use the compiled-in driver/url unless both were supplied on the command line.
  String driverClassName = DRIVER_CLASS;
  String url = DB_URL;
  if (args.length > 1) {
    driverClassName = args[0];
    url = args[1];
  }

  // Seed the database with test data before launching the job.
  initialize(driverClassName, url);

  JobConf job = new JobConf(getConf(), DBCountPageView.class);
  job.setJobName("Count Pageviews of URLs");

  job.setMapperClass(PageviewMapper.class);
  job.setCombinerClass(LongSumReducer.class);
  job.setReducerClass(PageviewReducer.class);

  // Read access records from the DB and write aggregated pageviews back.
  DBConfiguration.configureDB(job, driverClassName, url);
  DBInputFormat.setInput(job, AccessRecord.class, "Access", null, "url",
      AccessFieldNames);
  DBOutputFormat.setOutput(job, "Pageview", PageviewFieldNames);

  job.setMapOutputKeyClass(Text.class);
  job.setMapOutputValueClass(LongWritable.class);
  job.setOutputKeyClass(PageviewRecord.class);
  job.setOutputValueClass(NullWritable.class);

  try {
    JobClient.runJob(job);
    if (!verify()) {
      throw new RuntimeException("Evaluation was not correct!");
    }
  } finally {
    shutdown();
  }
  return 0;
}
项目:hadoop-book    文件:DBCountPageView.java   
@Override
//Usage DBCountPageView [driverClass dburl]
public int run(String[] args) throws Exception {

    // Use the compiled-in driver/url unless both were supplied on the command line.
    String driverClassName = DRIVER_CLASS;
    String url = DB_URL;
    if (args.length > 1) {
        driverClassName = args[0];
        url = args[1];
    }

    // Seed the database with test data before launching the job.
    initialize(driverClassName, url);

    JobConf job = new JobConf(getConf(), DBCountPageView.class);
    job.setJobName("Count Pageviews of URLs");

    job.setMapperClass(PageviewMapper.class);
    job.setCombinerClass(LongSumReducer.class);
    job.setReducerClass(PageviewReducer.class);

    // Read access records from the DB and write aggregated pageviews back.
    DBConfiguration.configureDB(job, driverClassName, url);
    DBInputFormat.setInput(job, AccessRecord.class, "Access", null, "url", AccessFieldNames);
    DBOutputFormat.setOutput(job, "Pageview", PageviewFieldNames);

    job.setMapOutputKeyClass(Text.class);
    job.setMapOutputValueClass(LongWritable.class);
    job.setOutputKeyClass(PageviewRecord.class);
    job.setOutputValueClass(NullWritable.class);

    try {
        JobClient.runJob(job);
        if (!verify()) {
            throw new RuntimeException("Evaluation was not correct!");
        }
    } finally {
        shutdown();
    }
    return 0;
}
项目:hanoi-hadoop-2.0.0-cdh    文件:DBCountPageView.java   
@Override
//Usage DBCountPageView [driverClass dburl]
public int run(String[] args) throws Exception {

  // Defaults apply unless both driver class and URL are given as arguments.
  final boolean argsProvided = args.length > 1;
  String driver = argsProvided ? args[0] : DRIVER_CLASS;
  String dbUrl = argsProvided ? args[1] : DB_URL;

  initialize(driver, dbUrl);

  // Job setup: map Access records to per-URL counts, reduce into Pageview rows.
  JobConf job = new JobConf(getConf(), DBCountPageView.class);
  job.setJobName("Count Pageviews of URLs");

  job.setMapperClass(PageviewMapper.class);
  job.setCombinerClass(LongSumReducer.class);
  job.setReducerClass(PageviewReducer.class);

  DBConfiguration.configureDB(job, driver, dbUrl);
  DBInputFormat.setInput(job, AccessRecord.class, "Access", null, "url",
      AccessFieldNames);
  DBOutputFormat.setOutput(job, "Pageview", PageviewFieldNames);

  // Map output types (Text/LongWritable) differ from the job's final output types.
  job.setMapOutputKeyClass(Text.class);
  job.setMapOutputValueClass(LongWritable.class);
  job.setOutputKeyClass(PageviewRecord.class);
  job.setOutputValueClass(NullWritable.class);

  try {
    JobClient.runJob(job);

    // Validate job results against an independently computed answer.
    if (!verify()) {
      throw new RuntimeException("Evaluation was not correct!");
    }
  } finally {
    shutdown();
  }
  return 0;
}
项目:hanoi-hadoop-2.0.0-cdh    文件:DBCountPageView.java   
@Override
//Usage DBCountPageView [driverClass dburl]
public int run(String[] args) throws Exception {

  // Command-line override requires both values; otherwise use the defaults.
  String driver;
  String dbUrl;
  if (args.length > 1) {
    driver = args[0];
    dbUrl = args[1];
  } else {
    driver = DRIVER_CLASS;
    dbUrl = DB_URL;
  }

  initialize(driver, dbUrl);

  // Assemble the MapReduce job over the Access/Pageview tables.
  JobConf job = new JobConf(getConf(), DBCountPageView.class);
  job.setJobName("Count Pageviews of URLs");

  job.setMapperClass(PageviewMapper.class);
  job.setCombinerClass(LongSumReducer.class);
  job.setReducerClass(PageviewReducer.class);

  DBConfiguration.configureDB(job, driver, dbUrl);
  DBInputFormat.setInput(job, AccessRecord.class, "Access", null, "url",
      AccessFieldNames);
  DBOutputFormat.setOutput(job, "Pageview", PageviewFieldNames);

  // Intermediate key/value classes are distinct from the final output classes.
  job.setMapOutputKeyClass(Text.class);
  job.setMapOutputValueClass(LongWritable.class);
  job.setOutputKeyClass(PageviewRecord.class);
  job.setOutputValueClass(NullWritable.class);

  try {
    JobClient.runJob(job);

    // Compare the persisted results with an independent calculation.
    if (!verify()) {
      throw new RuntimeException("Evaluation was not correct!");
    }
  } finally {
    shutdown();
  }
  return 0;
}
项目:hortonworks-extension    文件:DBCountPageView.java   
@Override
//Usage DBCountPageView [driverClass dburl]
public int run(String[] args) throws Exception {

  // Pick up driver/URL from the arguments when both are present.
  final boolean overridden = args.length > 1;
  String jdbcDriver = overridden ? args[0] : DRIVER_CLASS;
  String jdbcUrl = overridden ? args[1] : DB_URL;

  initialize(jdbcDriver, jdbcUrl);

  // Build the counting job: Access table in, Pageview table out.
  JobConf job = new JobConf(getConf(), DBCountPageView.class);
  job.setJobName("Count Pageviews of URLs");

  job.setMapperClass(PageviewMapper.class);
  job.setCombinerClass(LongSumReducer.class);
  job.setReducerClass(PageviewReducer.class);

  DBConfiguration.configureDB(job, jdbcDriver, jdbcUrl);
  DBInputFormat.setInput(job, AccessRecord.class, "Access", null, "url",
      AccessFieldNames);
  DBOutputFormat.setOutput(job, "Pageview", PageviewFieldNames);

  // Mapper emits Text/LongWritable; the reducer's final output differs.
  job.setMapOutputKeyClass(Text.class);
  job.setMapOutputValueClass(LongWritable.class);
  job.setOutputKeyClass(PageviewRecord.class);
  job.setOutputValueClass(NullWritable.class);

  try {
    JobClient.runJob(job);

    // Fail loudly if the job's output does not match the expected counts.
    if (!verify()) {
      throw new RuntimeException("Evaluation was not correct!");
    }
  } finally {
    shutdown();
  }
  return 0;
}
项目:hortonworks-extension    文件:DBCountPageView.java   
@Override
//Usage DBCountPageView [driverClass dburl]
public int run(String[] args) throws Exception {

  // Two CLI arguments override the compiled-in driver class and DB URL.
  String driver = DRIVER_CLASS;
  String dbUrl = DB_URL;
  if (args.length > 1) {
    driver = args[0];
    dbUrl = args[1];
  }

  initialize(driver, dbUrl);

  JobConf job = new JobConf(getConf(), DBCountPageView.class);
  job.setJobName("Count Pageviews of URLs");

  // Mapper/combiner/reducer chain that aggregates access counts per URL.
  job.setMapperClass(PageviewMapper.class);
  job.setCombinerClass(LongSumReducer.class);
  job.setReducerClass(PageviewReducer.class);

  DBConfiguration.configureDB(job, driver, dbUrl);
  DBInputFormat.setInput(job, AccessRecord.class, "Access", null, "url",
      AccessFieldNames);
  DBOutputFormat.setOutput(job, "Pageview", PageviewFieldNames);

  // Intermediate types for the shuffle; final types for the DB output.
  job.setMapOutputKeyClass(Text.class);
  job.setMapOutputValueClass(LongWritable.class);
  job.setOutputKeyClass(PageviewRecord.class);
  job.setOutputValueClass(NullWritable.class);

  try {
    JobClient.runJob(job);

    // Confirm the written Pageview rows match an independent check.
    if (!verify()) {
      throw new RuntimeException("Evaluation was not correct!");
    }
  } finally {
    shutdown();
  }
  return 0;
}
项目:hadoop-gpu    文件:DBCountPageView.java   
@Override
//Usage DBCountPageView [driverClass dburl]
public int run(String[] args) throws Exception {

  // Choose connection settings: CLI pair when supplied, defaults otherwise.
  final boolean fromArgs = args.length > 1;
  String driverName = fromArgs ? args[0] : DRIVER_CLASS;
  String connUrl = fromArgs ? args[1] : DB_URL;

  initialize(driverName, connUrl);

  // Configure the page-view counting job end to end.
  JobConf job = new JobConf(getConf(), DBCountPageView.class);
  job.setJobName("Count Pageviews of URLs");

  job.setMapperClass(PageviewMapper.class);
  job.setCombinerClass(LongSumReducer.class);
  job.setReducerClass(PageviewReducer.class);

  DBConfiguration.configureDB(job, driverName, connUrl);
  DBInputFormat.setInput(job, AccessRecord.class, "Access", null, "url",
      AccessFieldNames);
  DBOutputFormat.setOutput(job, "Pageview", PageviewFieldNames);

  // Map-stage output classes; final output goes back to the database.
  job.setMapOutputKeyClass(Text.class);
  job.setMapOutputValueClass(LongWritable.class);
  job.setOutputKeyClass(PageviewRecord.class);
  job.setOutputValueClass(NullWritable.class);

  try {
    JobClient.runJob(job);

    // Reject the run if verification against the source data fails.
    if (!verify()) {
      throw new RuntimeException("Evaluation was not correct!");
    }
  } finally {
    shutdown();
  }
  return 0;
}
项目:logcenter    文件:DBCountPageView.java   
@Override
//Usage DBCountPageView [driverClass dburl]
public int run(String[] args) throws Exception {

  // Resolve the JDBC settings, preferring the two command-line arguments.
  String driver;
  String dbUrl;
  if (args.length > 1) {
    driver = args[0];
    dbUrl = args[1];
  } else {
    driver = DRIVER_CLASS;
    dbUrl = DB_URL;
  }

  initialize(driver, dbUrl);

  JobConf job = new JobConf(getConf(), DBCountPageView.class);
  job.setJobName("Count Pageviews of URLs");

  // Count accesses per URL with a combiner to reduce shuffle volume.
  job.setMapperClass(PageviewMapper.class);
  job.setCombinerClass(LongSumReducer.class);
  job.setReducerClass(PageviewReducer.class);

  DBConfiguration.configureDB(job, driver, dbUrl);
  DBInputFormat.setInput(job, AccessRecord.class, "Access", null, "url",
      AccessFieldNames);
  DBOutputFormat.setOutput(job, "Pageview", PageviewFieldNames);

  // Shuffle types differ from the job's DB-bound output types.
  job.setMapOutputKeyClass(Text.class);
  job.setMapOutputValueClass(LongWritable.class);
  job.setOutputKeyClass(PageviewRecord.class);
  job.setOutputValueClass(NullWritable.class);

  try {
    JobClient.runJob(job);

    // Ensure the stored results agree with an independent recomputation.
    if (!verify()) {
      throw new RuntimeException("Evaluation was not correct!");
    }
  } finally {
    shutdown();
  }
  return 0;
}