Example source code for the Java class org.apache.log4j.BasicConfigurator
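
All of the snippets collected below bootstrap log4j 1.x through org.apache.log4j.BasicConfigurator. As a point of reference, here is a minimal sketch (not taken from any of the projects below; the class name and log messages are illustrative) of the two entry points they use: the no-argument configure(), which attaches a ConsoleAppender with a default PatternLayout to the root logger, and configure(Appender), which installs a caller-supplied appender instead.

import org.apache.log4j.BasicConfigurator;
import org.apache.log4j.ConsoleAppender;
import org.apache.log4j.Level;
import org.apache.log4j.Logger;
import org.apache.log4j.PatternLayout;

public class BasicConfiguratorSketch {
    public static void main(String[] args) {
        // Simplest bootstrap: a ConsoleAppender with a default layout on the root logger.
        BasicConfigurator.configure();
        Logger.getRootLogger().setLevel(Level.INFO);
        Logger.getLogger(BasicConfiguratorSketch.class).info("console logging configured");

        // Start over and install a caller-supplied appender with a custom pattern.
        BasicConfigurator.resetConfiguration();
        BasicConfigurator.configure(new ConsoleAppender(new PatternLayout("%d{ISO8601} [%t] %-5p %c - %m%n")));
        Logger.getLogger(BasicConfiguratorSketch.class).info("custom appender configured");
    }
}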

Project: amr2Fred    File: RdfWriter.java
/**
 * Get singleton instance of RdfWriter
 *
 * @return instance of RdfWriter
 */
public static RdfWriter getWriter() {
    if (writer == null) {
        BasicConfigurator.configure();
        writer = new RdfWriter();
    }
    writer.model = ModelFactory.createDefaultModel();
    writer.list = new LinkedBlockingQueue<>();
    return writer;
}
Project: phone-simulator    File: TesterHost.java
private void setupLog4j(String appName) {

        // InputStream inStreamLog4j = getClass().getResourceAsStream("/log4j.properties");

        String propFileName = appName + ".log4j.properties";
        File f = new File("./" + propFileName);
        if (f.exists()) {

            try {
                InputStream inStreamLog4j = new FileInputStream(f);
                Properties propertiesLog4j = new Properties();

                propertiesLog4j.load(inStreamLog4j);
                PropertyConfigurator.configure(propertiesLog4j);
            } catch (Exception e) {
                e.printStackTrace();
                BasicConfigurator.configure();
            }
        } else {
            BasicConfigurator.configure();
        }

        // logger.setLevel(Level.TRACE);
        logger.debug("log4j configured");

    }
Project: kubernetes-HDFS    File: PodCIDRToNodeMapping.java
public static void main(String[] args) throws ParseException {
  Options options = new Options();
  Option nameOption = new Option("n", true, "Name to resolve");
  nameOption.setRequired(true);
  options.addOption(nameOption);
  CommandLineParser parser = new BasicParser();
  CommandLine cmd = parser.parse(options, args);

  BasicConfigurator.configure();
  Logger.getRootLogger().setLevel(Level.DEBUG);
  PodCIDRToNodeMapping plugin = new PodCIDRToNodeMapping();
  Configuration conf = new Configuration();
  plugin.setConf(conf);

  String nameToResolve = cmd.getOptionValue(nameOption.getOpt());
  List<String> networkPathDirs = plugin.resolve(Lists.newArrayList(nameToResolve));
  log.info("Resolved " + nameToResolve + " to " + networkPathDirs);
}
Project: mpeg-audio-streams    File: AppTestMPEGAudioFrameInputStream.java
/**
 * main.
 *
 * @param args
 *            {@link String}{@code []}
 */
public static void main(final String[] args) {
    BasicConfigurator.configure();
    org.apache.log4j.Logger.getRootLogger().setLevel(Level.INFO);

    MP3TestFiles.iterateOverTestFiles(new FileHandler() {
        @Override
        public void handle(final File file) {
            if (AppTestMPEGAudioFrameInputStream.LOG.isInfoEnabled()) {
                AppTestMPEGAudioFrameInputStream.LOG
                        .info("######## Try to decode new file [" + file.getAbsolutePath() + "]."); //$NON-NLS-1$ //$NON-NLS-2$
            }
            decodeMpegAudioFile(file);
        }
    });
}
Project: mpeg-audio-streams    File: AppTestApplyGainFilter.java
/**
 * main.
 * @param args {@link String}{@code []}
 * @throws IOException due to file errors.
 */
@SuppressWarnings("nls")
public static void main(final String[] args) throws IOException {

    BasicConfigurator.configure();

    final File in = new File("src/test/mp3/1000Hz.mp3");
    // assume there is just one ID3 tag at the very beginning...
    final ID3Tag firstID3Tag = MPEGAudio.decodeFirstID3Tag(in);
    if (LOG.isInfoEnabled()) {
        LOG.info("" + firstID3Tag); //$NON-NLS-1$
    }
    final DecodingResult dr = MPEGAudio.decode(in, MPEGAudioContentFilter.MPEG_AUDIO_FRAMES);
    //        final File out = File.createTempFile("TestIntegrationReadWrite-TEST", ".mp3"); //$NON-NLS-1$ //$NON-NLS-2$
    final File out = new File("out.mp3");
    if (LOG.isInfoEnabled()) {
        LOG.info("Created tmp file [" + out.getAbsolutePath() + "]"); //$NON-NLS-1$ //$NON-NLS-2$
    }
    try (FileOutputStream fos = new FileOutputStream(out)) {
        final WaveFormGainFilter filter = new CosineGainFilter();
        filter.setWavelengthInSecs(20f);
        //            Filter filter = new FixFactorGainFIlter(0.9);
        MPEGAudio.encode(dr.getContent(), filter, fos, true);
    }

}
Project: bilibili-api    File: RuleSuite.java
@Override
protected void before() throws Throwable {
    // initialize slf4j
    BasicConfigurator.configure();
    // read the configuration file
    try {
        Config.setConfig(
                new Gson().fromJson(
                        new BufferedReader(new InputStreamReader(Config.class.getResourceAsStream("/config.json"))),
                        Config.class
                )
        );
    } catch (NullPointerException e) {
        // throwing an exception here cancels the tests
        throw new RuntimeException("Please create config file before tests");
    }
}
Project: FSTestTools    File: InitLog4jLoggingRule.java
@Override
public void before() throws Throwable {
    if (notConfigured) {
        //Now configure Log4J with console appender
        BasicConfigurator.configure();
        System.out.println("Configure Log4J for basic output...");
        System.out.println("Set root LOGGER loglevel to '" + rootLevel.toString() + "'!");
        Logger.getRootLogger().setLevel(rootLevel);
    }
    if (logger == null) {
        Logger.getRootLogger().setAdditivity(false);
        logger = Logger.getLogger(InitLog4jLoggingRule.class);
        logger.setAdditivity(false);
    }
    //Init FirstSpirit Logging with special logger
    Logging.init(new FS2Log4JLogger());
}
Project: unitimes    File: AGHCourseDetailsProvider.java
public static void main(String[] args) {
    try {
        BasicConfigurator.configure();
        Debug.info(" - Initializing Hibernate ... ");
        _RootDAO.initialize();

        ApplicationProperties.getConfigProperties().setProperty(
                ApplicationProperty.CustomizationDefaultCourseUrl.key(),
                "http://syllabuskrk.agh.edu.pl/:years/pl/magnesite/modules/:courseNbr");
        ApplicationProperties.getConfigProperties().setProperty("unitime.custom.default.course_api_url",
                "http://syllabuskrk.agh.edu.pl/api/:years/modules/:courseNbr");


        ApplicationProperties.getDefaultProperties()
                .setProperty(ApplicationProperty.CustomizationDefaultCourseDetailsDownload.key(), "true");

        System.out.println("URL:" + new AGHCourseDetailsProvider()
                .getCourseUrl(new AcademicSessionInfo(231379l, "2015", "Semestr zimowy", "AGH"), "BAND", "101"));

        System.out.println("Details:\n" + new AGHCourseDetailsProvider()
                .getDetails(new AcademicSessionInfo(231379l, "2015", "Semestr zimowy", "AGH"), "BAND", "101"));

    } catch (Exception e) {
        e.printStackTrace();
    }
}
Project: datalog-parser    File: Application.java
public static void main(String[] args) {

        BasicConfigurator.configure();

        port(4567);
        if (getPassword() != null && getKeystorePath() != null) {
            secure(getKeystorePath(), getPassword(), null, null);
        }

        staticFiles.location("/public");
        staticFiles.expireTime(600L);

        before("*", Filters.addTrailingSlashes);
        before("*", Filters.handleLocaleChange);

        get(Path.Web.INDEX, IndexController.serveIndexPage);
        get(Path.Web.PARSE, DatalogController.parseDatalog);

        after("*", Filters.addGzipHeader);
    }
Project: aws-sdk-java-v2    File: SqsIntegrationTest.java
/**
 * In the following test, we purposely set the time offset to trigger a clock skew error.
 * The time offset must then be corrected, after which we validate that the global time offset
 * has been updated.
 */
@Test
public void clockSkewFailure_CorrectsGlobalTimeOffset() throws Exception {
    BasicConfigurator.configure();
    final int originalOffset = SdkGlobalTime.getGlobalTimeOffset();
    final int skew = 3600;

    SdkGlobalTime.setGlobalTimeOffset(skew);
    assertEquals(skew, SdkGlobalTime.getGlobalTimeOffset());
    SQSAsyncClient sqsClient = createSqsAyncClient();

    sqsClient.listQueues(ListQueuesRequest.builder().build()).thenCompose( __ -> {
        assertThat("Clockskew is fixed!", SdkGlobalTime.getGlobalTimeOffset(), lessThan(skew));
        // subsequent changes to the global time offset won't affect existing client
        SdkGlobalTime.setGlobalTimeOffset(skew);
        return sqsClient.listQueues(ListQueuesRequest.builder().build());
    }).thenAccept( __ ->  {
        assertEquals(skew, SdkGlobalTime.getGlobalTimeOffset());
    }).join();

    sqsClient.close();

    SdkGlobalTime.setGlobalTimeOffset(originalOffset);
}
Project: big-data-benchmark    File: HadoopWordCount.java
public static void main(String[] args) throws Exception {
    BasicConfigurator.configure();
    Configuration conf = new Configuration();
    conf.setQuietMode(true);

    Job job = Job.getInstance(conf, "WordCount");
    job.setJarByClass(HadoopWordCount.class);

    job.setOutputKeyClass(Text.class);
    job.setOutputValueClass(IntWritable.class);

    job.setMapperClass(Map.class);
    job.setCombinerClass(Reduce.class);
    job.setReducerClass(Reduce.class);

    job.setInputFormatClass(TextInputFormat.class);
    job.setOutputFormatClass(TextOutputFormat.class);

    FileInputFormat.setInputPaths(job, new Path(args[0]));
    FileOutputFormat.setOutputPath(job, new Path(args[1] + "_" + System.currentTimeMillis()));

    long t = System.currentTimeMillis();
    job.waitForCompletion(true);

    System.out.println("TotalTime=" + (System.currentTimeMillis() - t));
}
Project: benchmarx    File: EMoflonFamiliesToPersons.java
@Override
public void initiateSynchronisationDialogue() {
    BasicConfigurator.configure();
    helper = new F2PSyncHelper();       
    Resource r = helper.getResourceSet().createResource(URI.createURI("sourceModel"));
    FamilyRegister familiesRoot = FamiliesFactory.eINSTANCE.createFamilyRegister();
    r.getContents().add(familiesRoot);

    // Fix default preferences (which can be overwritten)
    setConfigurator(new Configurator<Decisions>()
            .makeDecision(Decisions.PREFER_CREATING_PARENT_TO_CHILD, true)
            .makeDecision(Decisions.PREFER_EXISTING_FAMILY_TO_NEW, true));

    // perform batch to establish consistent starting state
    helper.setSrc(familiesRoot);
    helper.integrateForward();  

    helper.setMute(true);
}
Project: Android-Client-Server    File: Log4jServlet.java
/**
 * Initialization of the servlet. <br>
 *
 * @throws ServletException if an error occurs
 */
public void init(ServletConfig config) throws ServletException {
    // Put your code here
    System.out.println("Log4jServlet 正在初始化log4j日志设置信息");
    String log4jLocation = config.getInitParameter("log4j-properties-location");
    ServletContext sc = config.getServletContext();
    if(log4jLocation == null){
        System.out.println("** 没有log4j-properties-location 初始化文件,所以使用BasicConfiguratorc初始化 **");
        BasicConfigurator.configure();
    }else{
        String webAppPath = sc.getRealPath("/");
        String log4jProp = webAppPath + log4jLocation;
        File log4File = new File(log4jProp);
        if(log4File.exists()){
            System.out.println("使用:"+ log4jProp + "初始化日志设置信息");
            PropertyConfigurator.configure(log4jProp);
        }else{
            System.out.println("*****" + log4jProp + "文件没有找到,所以使用BasicConfigurator初始化*****");
            BasicConfigurator.configure();
        }
    }
    super.init(config);
    System.out.println("---------log4jServlet  初始化log4j日志设置信息完成--------");

}
Project: FunnySpider    File: SpiderApp.java
/**
 * Set up the logging configuration.
 * @param level
 * @param fileName
 * @param append
 * @param configFilename
 * @return this
 */
public SpiderApp setLogerConfig(int level,String fileName,boolean append,String configFilename){
    logMap.put("level", level);
    logMap.put("fileName", fileName);
    logMap.put("append",append);
    logMap.put("configFilename",configFilename);
    logger = Logger.getLogger(SpiderApp.class);
    Level logLevel = getLogLevel(level);
    try {
        FileAppender appender = new FileAppender(new SimpleLayout(), fileName,append);
        if(null == configFilename){
            BasicConfigurator.configure();
        }else{
            PropertyConfigurator.configure(configFilename);
        }
        logger.addAppender(appender);
        logger.setLevel(logLevel);
    } catch (IOException e) {
        e.printStackTrace(System.err);
    } 
    return this;
}
Project: hdfs-mesos    File: HdfsMesosTestCase.java
@Before
public void before() throws Exception {
    BasicConfigurator.configure();
    Scheduler.$.initLogging();

    File storageFile = Files.createTempFile(MesosTestCase.class.getSimpleName(), null).toFile();
    assertTrue(storageFile.delete());
    Nodes.storage = Storage.file(storageFile);
    Nodes.reset();

    Scheduler.Config config = Scheduler.$.config;
    config.api = "http://localhost:" + Net.findAvailPort();
    config.jar = new File("hdfs-mesos-0.1.jar");
    config.hadoop = new File("hadoop-1.2.1.tar.gz");

    Cli.api = config.api;
    Scheduler.$.subscribed(schedulerDriver, "id", new Master());
}
Project: JHelioviewer-SWHV    File: LogSettings.java
/**
 * @param defaultLogSettingsPath
 *            Path to the default log settings
 * @param logsDirectory
 *            Path to the directory where the log files are stored
 */
public static void init(String defaultLogSettingsPath, String logsDirectory) {
    Properties settings = new Properties();
    try (InputStream is = FileUtils.getResourceInputStream(defaultLogSettingsPath)) {
        settings.load(is);
    } catch (IOException e) {
        Log.log.error("Could not load default log settings: " + e.getMessage());
    }

    String filePattern = "'jhv.'yyyy-MM-dd'T'HH-mm-ss'.log'";

    settings.setProperty("log4j.appender.file.Directory", logsDirectory);
    settings.setProperty("log4j.appender.file.Pattern", filePattern);

    SimpleDateFormat formatter = new SimpleDateFormat(filePattern);
    formatter.setTimeZone(TimeZone.getTimeZone(System.getProperty("user.timezone")));
    settings.setProperty("log4j.appender.file.TimeStamp", formatter.format(new Date()));

    BasicConfigurator.configure();
    PropertyConfigurator.configure(settings);
}
Project: common-libraries    File: XdsbRepositoryAdapterIT.java
@BeforeClass
public static void setUp() throws Exception {
    BasicConfigurator.configure();

    endpointAddress = DEV_XDSB_REPOSITORY_ENDPOINT;
    final XdsbRepositoryWebServiceClient client = new XdsbRepositoryWebServiceClient(endpointAddress);
    client.setOutInterceptors(Collections.singletonList(new ContentTypeRebuildingOutboundSoapInterceptor()));
    client.setLoggingInterceptorsEnabled(true);

    xdsbRepositoryAdapter = new XdsbRepositoryAdapter(client, new SimpleMarshallerImpl(), new XmlTransformerImpl(new SimpleMarshallerImpl()));


    fileReader = new FileReaderImpl();
    c32 = fileReader.readFile("uploadC32.xml");
    c32_with_c2s_mrn_oid = fileReader.readFile("uploadC32_C2S_OID.xml");
    CCDA11 = fileReader.readFile("C-CDA_R1.1.xml");
    CCDA20 = fileReader.readFile("C-CDA_R2.0.xml");
    CCDA21 = fileReader.readFile("C-CDA_R2.1.xml");
}
Project: tracing-framework    File: PublishTool.java
public static void main(String[] args) throws InterruptedException {
    // Configure console logging
    BasicConfigurator.configure();

    String topic = "publishing_to_topic";
    if (args.length > 0) {
        topic = args[0];
    }
    System.out.println("Publishing to topic " + topic);

    while (!Thread.currentThread().isInterrupted()) {
        String s = RandomStringUtils.randomAlphanumeric(10);
        PubSub.publish(topic, s);
        Thread.sleep(1000);
    }
}
Project: tracing-framework    File: SubscribeTool.java
public static void main(String[] args) throws InterruptedException {
    // Configure console logging
    BasicConfigurator.configure();

    String topic = "publishing_to_topic";
    if (args.length > 0) {
        topic = args[0];
    }
    System.out.println("Subscribing to topic " + topic);

    // Subscribe to the topic
    PubSub.subscribe(topic, new SubscribeToolSubscriber());

    // Wait for the client to be interrupted
    PubSub.client().join();
}
Project: grouper    File: Action.java
/**
 * Sends a message to the group the user is in
 */
public void message(){
    try{
        String key = content.substring(0, Math.min(140, content.length()));
        Selector selector = new Selector("jdbc:mysql://localhost:3306/Grouper", SQL.username, SQL.password);
        ResultSet selected = selector.select("*", "Users", "Number='"+number+"'");
        while (selected.next()){
            if (selected.getString("Number").equals(number)){
                ResultSet users = selector.select("*", "Users", "Chat="+selected.getInt("Chat"));
                while (users.next()){
                    if (selected.getInt("Chat") == users.getInt("Chat") && !selected.getString("Number").equals(users.getString("Number"))){
                        (new SendSms(users.getString("Number"), selected.getString("Name")+": "+content)).sendSms();
                    }
                }
            }
        }
        selector.close();
    } catch (SQLException ex) {
        BasicConfigurator.configure();
        log.info("SQLException: " + ex.getMessage());
        log.info("SQLState: " + ex.getSQLState());
        log.info("VendorError: " + ex.getErrorCode());
    }
}
Project: gflogger    File: TestGarbageDefaultLoggerServiceImpl.java
@Test
public void testLog4JAppendString() throws Exception {
    BasicConfigurator.configure();
    org.apache.commons.logging.Log log = org.apache.commons.logging.LogFactory.getLog("com.db.fxpricing.Logger");

    for(long i = 0; i < WARMUP_COUNT; i++)
        log.info("value");
    Thread.sleep(1000L);

    objectCounting.set(true);
    for(long i = 0; i < TEST_COUNT; i++)
        log.info("value");

    Thread.sleep(500L);

    printState("log4j-string");
}
Project: gflogger    File: TestGarbageDefaultLoggerServiceImpl.java
@Test
public void testLog4JAppendLong() throws Exception {
    BasicConfigurator.configure();
    org.apache.commons.logging.Log log = org.apache.commons.logging.LogFactory.getLog("com.db.fxpricing.Logger");

    for(long i = 0; i < WARMUP_COUNT; i++)
        log.info("value:" + i);
    Thread.sleep(1000L);

    objectCounting.set(true);
    for(long i = 0; i < TEST_COUNT; i++)
        log.info("value:" + i);

    Thread.sleep(500L);

    printState("log4j-long");
}
Project: gflogger    File: TestGarbageDefaultLoggerServiceImpl.java
@Test
public void testLog4JAppendLongs() throws Exception {
    BasicConfigurator.configure();
    org.apache.commons.logging.Log log = org.apache.commons.logging.LogFactory.getLog("com.db.fxpricing.Logger");

    for(long i = 0; i < WARMUP_COUNT; i++)
        log.info("value:" + i + " " + i + " " + i + " " + i + " " + i
                + i + " " + i + " " + i + " " + i + " " + i);
    Thread.sleep(1000L);

    objectCounting.set(true);
    for(long i = 0; i < TEST_COUNT; i++)
        log.info("value:" + i + " " + i + " " + i + " " + i + " " + i
                + i + " " + i + " " + i + " " + i + " " + i);

    Thread.sleep(500L);

    printState("log4j-longs");
}
Project: cookjson    File: JsonPathProviderTest.java
@Test
public void testParseString () throws IOException
{
    BasicConfigurator.configure ();
    String f = "../tests/data/complex1.json";
    File file = new File (f.replace ('/', File.separatorChar));
    String str = Utils.getString (file);

    JsonPathProvider provider = new JsonPathProvider ();

    Configuration pathConfig = Configuration.defaultConfiguration ().jsonProvider (provider);
    JsonPath path = JsonPath.compile ("$.strange");
    JsonValue value = path.read (str, pathConfig);

    // we cannot directly compare the output since the attribute ordering
    // can vary.
    Assert.assertEquals ("{\"id\":5555,\"price\":[1,2,3],\"customer\":\"john...\"}".length (), provider.toJson (value).length ());
}
Project: cookjson    File: JsonPathProviderTest.java
@Test
public void testBson () throws IOException
{
    BasicConfigurator.configure ();
    String f = "../tests/data/data1.bson";
    File file = new File (f.replace ('/', File.separatorChar));

    JsonPathProvider provider = new JsonPathProvider ();

    Configuration pathConfig = Configuration.defaultConfiguration ().jsonProvider (provider);
    JsonPath path = JsonPath.compile ("$..A");

    JsonProvider p = new CookJsonProvider ();
    HashMap<String, Object> readConfig = new HashMap<String, Object> ();
    readConfig.put (CookJsonProvider.FORMAT, CookJsonProvider.FORMAT_BSON);
    readConfig.put (CookJsonProvider.ROOT_AS_ARRAY, Boolean.TRUE);
    JsonReaderFactory rf = p.createReaderFactory (readConfig);
    JsonReader reader = rf.createReader (new FileInputStream (file));
    JsonStructure obj = reader.read ();
    reader.close ();

    JsonValue value = path.read (obj, pathConfig);

    Assert.assertEquals ("[1,3,5,7]", provider.toJson (value));
}
Project: eagle    File: EagleDaemon.java
public static void main(String[] args) throws Exception {
  OptionParser parser = new OptionParser();
  parser.accepts("c", "configuration file (required)").
    withRequiredArg().ofType(String.class);
  parser.accepts("help", "print help statement");
  OptionSet options = parser.parse(args);

  if (options.has("help") || !options.has("c")) {
    parser.printHelpOn(System.out);
    System.exit(-1);
  }

  // Set up a simple configuration that logs on the console.
  BasicConfigurator.configure();

  Logging.configureAuditLogging();

  String configFile = (String) options.valueOf("c");
  Configuration conf = new PropertiesConfiguration(configFile);
  EagleDaemon eagleDaemon = new EagleDaemon();
  eagleDaemon.initialize(conf);
}
Project: notaql    File: NotaQLColumnTest.java
@Before
public void setUp() throws Exception {
    BasicConfigurator.configure();
    engines = "IN-ENGINE: hbase(table_id <- '"+inTable +"'),OUT-ENGINE: hbase(table_id <- '"+outTable +"'),";

    this.conf = HBaseConfiguration.create();

    NotaQL.loadConfig("settings.config");

    String host = NotaQL.prop.getProperty("hbase_host");

    if(host == null)
        host = "localhost";

    conf.set("hbase.zookeeper.quorum", host);
    conf.set("hbase.zookeeper.property.clientPort","2181");
    conf.set("hbase.master", host + ":60000");
}
Project: sakai    File: AssignmentActionTestTools.java
@Before
public void setUp() {
    BasicConfigurator.configure();
    PowerMockito.mockStatic(ComponentManager.class);
    // A mock component manager.
    when(ComponentManager.get(any(Class.class))).then(new Answer<Object>() {
        private Map<Class, Object> mocks = new HashMap<>();
        @Override
        public Object answer(InvocationOnMock invocation) throws Throwable {
            Class classToMock = (Class) invocation.getArguments()[0];
            return mocks.computeIfAbsent(classToMock, k -> mock(classToMock));
        }
    });

    when(ComponentManager.get(SessionManager.class).getCurrentSession()).thenReturn(mock(Session.class));
    when(FormattedText.getDecimalSeparator()).thenReturn(".");

    when(FormattedText.getNumberFormat()).thenReturn(NumberFormat.getInstance(Locale.ENGLISH));
    assignmentAction = new AssignmentAction();

    Mockito.when(ComponentManager.get(AssignmentService.class)).thenReturn(assignmentService);

}
Project: VirtaMarketAnalyzer    File: TransportParser.java
public static void main(final String[] args) throws IOException {
    BasicConfigurator.configure(new ConsoleAppender(new PatternLayout("%r %d{ISO8601} [%t] %p %c %x - %m%n")));
    final String host = Wizard.host;
    final String realm = "olga";
    logger.info("begin");
    final List<Country> countries = CityInitParser.getCountries(host, realm);
    logger.info("countries.size = {}", countries.size());
    final List<Region> regions = CityInitParser.getRegions(host, realm);
    logger.info("regions.size = {}", regions.size());
    final List<City> cities = CityListParser.fillWealthIndex(host, realm, regions);
    logger.info("cities.size = {}", cities.size());
    final List<Product> materials = ProductInitParser.getManufactureProducts(host, realm);
    logger.info("materials.size = {}", materials.size());
    logger.info("парсим транспортные расходы, {}", materials.size() * cities.size());

    TransportParser.setRowsOnPage(host, realm, Math.max(400, cities.size()), cities.get(0), materials.get(0));
    final List<Transport> list = TransportParser.parseTransport(host, realm, cities, cities.get(0), materials.get(0));
    logger.info(Utils.getPrettyGson(list));
    logger.info("list.size = {}", list.size());
}
Project: incubator-rya    File: CopyTool.java
public static void main(final String[] args) {
    final String log4jConfiguration = System.getProperties().getProperty("log4j.configuration");
    if (StringUtils.isNotBlank(log4jConfiguration)) {
        final String parsedConfiguration = PathUtils.clean(StringUtils.removeStart(log4jConfiguration, "file:"));
        final File configFile = new File(parsedConfiguration);
        if (configFile.exists()) {
            DOMConfigurator.configure(parsedConfiguration);
        } else {
            BasicConfigurator.configure();
        }
    }
    log.info("Starting Copy Tool");

    Thread.setDefaultUncaughtExceptionHandler((thread, throwable) -> log.error("Uncaught exception in " + thread.getName(), throwable));

    final CopyTool copyTool = new CopyTool();
    final int returnCode = copyTool.setupAndRun(args);

    log.info("Finished running Copy Tool");

    System.exit(returnCode);
}
Project: incubator-rya    File: MergeTool.java
public static void main(final String[] args) {
    final String log4jConfiguration = System.getProperties().getProperty("log4j.configuration");
    if (StringUtils.isNotBlank(log4jConfiguration)) {
        final String parsedConfiguration = PathUtils.clean(StringUtils.removeStart(log4jConfiguration, "file:"));
        final File configFile = new File(parsedConfiguration);
        if (configFile.exists()) {
            DOMConfigurator.configure(parsedConfiguration);
        } else {
            BasicConfigurator.configure();
        }
    }
    log.info("Starting Merge Tool");

    Thread.setDefaultUncaughtExceptionHandler(new Thread.UncaughtExceptionHandler() {
        @Override
        public void uncaughtException(final Thread thread, final Throwable throwable) {
            log.error("Uncaught exception in " + thread.getName(), throwable);
        }
    });

    final int returnCode = setupAndRun(args);

    log.info("Finished running Merge Tool");

    System.exit(returnCode);
}
Project: datasource    File: SnoMedOboGenerator.java
public static void main(String[] args) {
    BasicConfigurator.configure();
    File snomedDirectory = new File(
            "/Users/bill/Documents/snomed/SnomedCT_Release_INT_20130731/RF2Release/Full/Terminology");
    File snomedConceptFile = new File(snomedDirectory, "sct2_Concept_Full_INT_20130731.txt.activeOnly");
    File snomedDescriptionFile = new File(snomedDirectory, "sct2_Description_Full-en_INT_20130731.txt.activeOnly");
    File snomedRelationFile = new File(snomedDirectory, "sct2_Relationship_Full_INT_20130731.txt.activeOnly");
    File outputOboFile = new File(snomedDirectory, "snomed-restricted.obo");
    Set<String> rootNodesToInclude = CollectionsUtil.createSet("123037004", "404684003");
    try {
        generateObo(snomedConceptFile, snomedDescriptionFile, snomedRelationFile, outputOboFile, rootNodesToInclude);
    } catch (IOException e) {
        e.printStackTrace();
        System.exit(-1);
    }
}
Project: HelloKafka    File: RunConsumer.java
public static void main(String[] args) {
        // log4j init
        BasicConfigurator.configure();

        SimpleConsumer simpleConsumer = new SimpleConsumer("localhost:2181", "myGroupId", "HelloKafka", "10000");
        simpleConsumer.configure();
        simpleConsumer.start();

        String message;

        while ((message = simpleConsumer.fetchMessage()) != null) {

            System.out.println("Received from kafka: " + message);

            /**
             * If you wish to commit offsets on every message, uncomment this line.
             * Best practice is to batch offset commits (for performance), which on the other hand can cause problems:
             * if the consumer recovers from a crash, it may receive messages that have already been processed,
             * because their offsets were never committed.
             */
//            myConsumer.consumerConnector.commitOffsets();
        }
    }
Project: jstorm-0.9.6.3-    File: LocalCluster.java
protected void setLogger() {
    boolean needReset = true;
    Logger rootLogger = Logger.getRootLogger();
    if (rootLogger != null) {
        Enumeration appenders = rootLogger.getAllAppenders();
        if (appenders.hasMoreElements() == true) {
            needReset = false;
        }
    }

    if (needReset == true) {
        BasicConfigurator.configure();
        rootLogger.setLevel(Level.INFO);
    }

}
Project: semweb4j    File: RdfSClassGenerator.java
private void generateCode() throws MojoExecutionException, MojoFailureException {
    try {
        // make sure that directory for log file exists.
        rdfReactorLogfile.getParentFile().mkdirs();

        // configure logging infrastructure for RDFReactor
        FileAppender logFileAppender = new FileAppender(new SimpleLayout(), rdfReactorLogfile.getAbsolutePath());
        BasicConfigurator.configure(logFileAppender);

    } catch (IOException ioe) {
        throw new MojoExecutionException("Cannot open log file for writing RDFReactor log messages", ioe);
    }

    getLog().info("Generating code from RDF schema file " + schemaFile + " into dir " + outputDirectory
            + ". Classes will be in package " + packageName + " and with method prefix " + methodPrefix +". skipBuiltins is " + skipBuiltins + ".");
    getLog().info("RDFReactor's log messages are written to " + rdfReactorLogfile);


    try {
        CodeGenerator.generate(schemaFile.getAbsolutePath(), outputDirectory.getAbsolutePath(), packageName, Reasoning.rdfs, skipBuiltins, methodPrefix);
    } catch (Exception e) {
        e.printStackTrace();
        throw new MojoFailureException(e, "RDFS processing error", "Could not generate code from the specified RDF schema file.");
    }
}
Project: java-algebra-system    File: Examples.java
/**
 * main.
 */
public static void main(String[] args) {
    BasicConfigurator.configure();
    if (args.length > 0) {
        example1();
        example2();
        example3();
        example4();
    }
    example5();
    example6();
    example10();
    example11();
    example12();
    ComputerThreads.terminate();
}
Project: Pinot    File: TestStarTreeSegmentCreator.java
@Test(enabled = true)
  public void testCreation() throws Exception {
    BasicConfigurator.configure();

    final SegmentGeneratorConfig config =
        SegmentTestUtils.getSegmentGenSpecWithSchemAndProjectedColumns(avroFile, indexDir, "daysSinceEpoch",
            TimeUnit.DAYS, "testTable");
    config.setSegmentNamePostfix("1");
    config.setTimeColumnName("daysSinceEpoch");

    // Set the star tree index config
    StarTreeIndexSpec starTreeIndexSpec = new StarTreeIndexSpec();
//    starTreeIndexSpec.setSplitExcludes(Arrays.asList("D1", "daysSinceEpoch"));
    starTreeIndexSpec.setSplitExcludes(Arrays.asList("daysSinceEpoch"));
    starTreeIndexSpec.setMaxLeafRecords(4);
    config.getSchema().setStarTreeIndexSpec(starTreeIndexSpec);

    final SegmentIndexCreationDriver driver = SegmentCreationDriverFactory.get(null);
    driver.init(config);
    driver.build();
  }
Project: rct-java    File: Echo.java
public static void main(String[] args) {

        BasicConfigurator.configure();

        if (args.length != 2) {
            System.err.println("Required 2 arguments!");
            System.exit(1);
        }
        try {
            TransformReceiver transformer = TransformerFactory.getInstance()
                    .createTransformReceiver();

            Thread.sleep(1000);

            Transform t = transformer.lookupTransform(args[0], args[1],
                    System.currentTimeMillis());

            System.out.println(t);
        } catch (Exception e) {
            e.printStackTrace();
            System.exit(1);
        }

        System.exit(0);
    }