Python multiprocessing module: get_logger() example source code

We extracted the following 16 code examples from open-source Python projects to illustrate how to use multiprocessing.get_logger().
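
Before the project examples, here is a minimal, self-contained sketch of the pattern most of them share: multiprocessing.log_to_stderr() attaches a stderr handler (whose default format includes the process name) to the module-level logger returned by multiprocessing.get_logger(), and setLevel() controls which records get emitted. The worker function and messages below are illustrative, not taken from any of the listed projects.

import logging
import multiprocessing


def _worker():
    # Under the "spawn" start method the parent's handler is not inherited,
    # so attach one here if needed; under "fork" it carries over.
    logger = multiprocessing.get_logger()
    if not logger.handlers:
        multiprocessing.log_to_stderr()
    logger.setLevel(logging.INFO)
    logger.info("hello from %s", multiprocessing.current_process().name)


if __name__ == "__main__":
    multiprocessing.log_to_stderr()        # prints e.g. "[INFO/MainProcess] starting worker"
    logger = multiprocessing.get_logger()  # the shared logger named "multiprocessing"
    logger.setLevel(logging.INFO)
    logger.info("starting worker")

    p = multiprocessing.Process(target=_worker)
    p.start()
    p.join()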

Project: BrundleFuzz    Author: carlosgprado    | Project source | File source
def main():
    """Starts several processes

    This must be kept to the bare minimum
    """
    multiprocessing.log_to_stderr()
    logger = multiprocessing.get_logger()
    logger.setLevel(logging.INFO)

    jobs = []

    try:
        bfs = BrundleFuzzServer()
        jobs.append(bfs)
        bfs.start()

        for j in jobs:
            j.join()

    except KeyboardInterrupt:
        pass
Project: ac_pysmac    Author: belkhir-nacim    | Project source | File source
def smac_classpath():
    """
    Small function gathering all information to build the java class path.

    :returns: string representing the Java classpath for SMAC

    """
    import multiprocessing
    from pkg_resources import resource_filename

    logger = multiprocessing.get_logger()

    smac_folder = resource_filename("ac_pysmac", 'smac/%s' % ac_pysmac.remote_smac.SMAC_VERSION)

    smac_conf_folder = os.path.join(smac_folder, "conf")
    smac_patches_folder = os.path.join(smac_folder, "patches")
    smac_lib_folder = os.path.join(smac_folder, "lib")

    classpath = [fname for fname in os.listdir(smac_lib_folder) if fname.endswith(".jar")]
    classpath = [os.path.join(smac_lib_folder, fname) for fname in classpath]
    classpath = [os.path.abspath(fname) for fname in classpath]
    classpath.append(os.path.abspath(smac_conf_folder))
    classpath.append(os.path.abspath(smac_patches_folder))

    # For Windows compatibility
    classpath = (os.pathsep).join(classpath)
    logger.debug("SMAC classpath: %s", classpath)
    return classpath
Project: oil    Author: oilshell    | Project source | File source
def test_enable_logging(self):
        logger = multiprocessing.get_logger()
        logger.setLevel(util.SUBWARNING)
        self.assertTrue(logger is not None)
        logger.debug('this will not be printed')
        logger.info('nor will this')
        logger.setLevel(LOG_LEVEL)
Project: python2-tracer    Author: extremecoders-re    | Project source | File source
def test_enable_logging(self):
        logger = multiprocessing.get_logger()
        logger.setLevel(util.SUBWARNING)
        self.assertTrue(logger is not None)
        logger.debug('this will not be printed')
        logger.info('nor will this')
        logger.setLevel(LOG_LEVEL)
Project: python2-tracer    Author: extremecoders-re    | Project source | File source
def _test_level(cls, conn):
        logger = multiprocessing.get_logger()
        conn.send(logger.getEffectiveLevel())
Project: ouroboros    Author: pybee    | Project source | File source
def test_enable_logging(self):
        logger = multiprocessing.get_logger()
        logger.setLevel(util.SUBWARNING)
        self.assertTrue(logger is not None)
        logger.debug('this will not be printed')
        logger.info('nor will this')
        logger.setLevel(LOG_LEVEL)
Project: ouroboros    Author: pybee    | Project source | File source
def _test_level(cls, conn):
        logger = multiprocessing.get_logger()
        conn.send(logger.getEffectiveLevel())
Project: python_rabbitmq_multiprocessing_crawl    Author: ghotiv    | Project source | File source
def error(msg, *args):
    return multiprocessing.get_logger().error(msg, *args)
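
A small usage sketch of this wrapper style (the info() helper and the __main__ block are assumptions for illustration, not code from the project): thin module-level helpers let the rest of the crawler log through the shared multiprocessing logger without fetching it each time.

import logging
import multiprocessing


def info(msg, *args):
    # Companion helper in the same style as error() above (illustrative).
    return multiprocessing.get_logger().info(msg, *args)


if __name__ == "__main__":
    # log_to_stderr() returns the same logger the helpers write to.
    multiprocessing.log_to_stderr().setLevel(logging.INFO)
    info("crawler started with %d workers", 4)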
Project: ac_pysmac    Author: belkhir-nacim    | Project source | File source
def __init__(self, scenario_fn, additional_options_fn, seed, class_path, memory_limit, parser_dict,
                 java_executable):
        """
        Starts SMAC in IPC mode. SMAC will connect back over a local TCP socket.
        """
        self.__parser = parser_dict
        self.__subprocess = None
        self.__logger = multiprocessing.get_logger()

        # establish a socket
        self.__sock = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
        self.__sock.setsockopt(socket.SOL_SOCKET, socket.SO_REUSEADDR, 1)
        self.__sock.settimeout(3)
        self.__sock.bind(('', 0))
        self.__sock.listen(1)

        self.__port = self.__sock.getsockname()[1]
        self.__logger.debug('picked port %i' % self.__port)

        # build the java command
        cmds = java_executable.split()
        if memory_limit is not None:
            cmds += ["-Xmx%im" % memory_limit]
        cmds += ["-XX:ParallelGCThreads=4",
                 "-cp",
                 class_path,
                 "ca.ubc.cs.beta.smac.executors.SMACExecutor",
                 "--scenario-file", scenario_fn,
                 "--tae", "IPC",
                 "--ipc-mechanism", "TCP",
                 "--ipc-remote-port", str(self.__port),
                 "--seed", str(seed)
                 ]

        with open(additional_options_fn, 'r') as fh:
            for line in fh:
                name, value = line.strip().split(' ')
                cmds += ['--%s' % name, '%s' % value]

        self.__logger.debug("SMAC command: %s" % (' '.join(cmds)))

        self.__logger.debug("Starting SMAC in ICP mode")

        # connect the output to the logger if the appropriate level has been set
        if self.__logger.level < logging.WARNING:
            self.__subprocess = subprocess.Popen(cmds, stdout=sys.stdout, stderr=sys.stderr)
        else:
            with open(os.devnull, "w") as fnull:
                self.__subprocess = subprocess.Popen(cmds, stdout=fnull, stderr=fnull)
Project: ac_pysmac    Author: belkhir-nacim    | Project source | File source
def enforce_limits(mem_in_mb=None, cpu_time_in_s=None, wall_time_in_s=None, num_processes=None, grace_period_in_s=None):
    logger = multiprocessing.get_logger()

    if mem_in_mb is not None:
        logger.debug("restricting your function to {} mb memory.".format(mem_in_mb))
    if cpu_time_in_s is not None:
        logger.debug("restricting your function to {} seconds cpu time.".format(cpu_time_in_s))
    if wall_time_in_s is not None:
        logger.debug("restricting your function to {} seconds wall time.".format(wall_time_in_s))
    if num_processes is not None:
        logger.debug("restricting your function to {} threads/processes.".format(num_processes))
    if grace_period_in_s is None:
        grace_period_in_s = 0

    def actual_decorator(func):

        def wrapped_function(*args, **kwargs):
            global return_value
            logger = multiprocessing.get_logger()

            # create a pipe to retrieve the return value
            parent_conn, child_conn = multiprocessing.Pipe()

            # create and start the process
            subproc = multiprocessing.Process(target=subprocess_func, name=" multiproc function call", args=(func,
                                                                                                             child_conn,
                                                                                                             mem_in_mb,
                                                                                                             cpu_time_in_s,
                                                                                                             wall_time_in_s,
                                                                                                             num_processes) + args,
                                              kwargs=kwargs)
            logger.debug("Your function is called now.")

            return_value = None

            # start the process
            subproc.start()
            child_conn.close()

            try:
                # read the return value
                return_value = parent_conn.recv()
            except EOFError:  # Don't see that in the unit tests :(
                logger.debug("Your function call closed the pipe prematurely -> None will be returned")
                return_value = None
            except:
                raise
            finally:
                # don't leave zombies behind
                subproc.join()
                return return_value

        return wrapped_function

    return actual_decorator
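
A hypothetical application of the decorator above (the function name, limits, and arithmetic are illustrative; it assumes enforce_limits and its subprocess_func helper are importable from the project): the decorated call runs in a child process, and the wrapper returns whatever value the child sends back over the Pipe, or None if the child closes the pipe without sending one.

# Assumes enforce_limits (and subprocess_func) from the snippet above is importable.
@enforce_limits(mem_in_mb=1024, wall_time_in_s=30, grace_period_in_s=5)
def train_model(n_iterations):
    return sum(i * i for i in range(n_iterations))


if __name__ == "__main__":
    result = train_model(10**6)   # executed in a separate process
    print(result)                 # value received over the Pipe, or None on failure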
Project: oil    Author: oilshell    | Project source | File source
def _test_level(cls, conn):
        logger = multiprocessing.get_logger()
        conn.send(logger.getEffectiveLevel())
Project: oil    Author: oilshell    | Project source | File source
def test_level(self):
        LEVEL1 = 32
        LEVEL2 = 37

        logger = multiprocessing.get_logger()
        root_logger = logging.getLogger()
        root_level = root_logger.level

        reader, writer = multiprocessing.Pipe(duplex=False)

        logger.setLevel(LEVEL1)
        p = self.Process(target=self._test_level, args=(writer,))
        p.daemon = True
        p.start()
        self.assertEqual(LEVEL1, reader.recv())

        logger.setLevel(logging.NOTSET)
        root_logger.setLevel(LEVEL2)
        p = self.Process(target=self._test_level, args=(writer,))
        p.daemon = True
        p.start()
        self.assertEqual(LEVEL2, reader.recv())

        root_logger.setLevel(root_level)
        logger.setLevel(level=LOG_LEVEL)


# class _TestLoggingProcessName(BaseTestCase):
#
#     def handle(self, record):
#         assert record.processName == multiprocessing.current_process().name
#         self.__handled = True
#
#     def test_logging(self):
#         handler = logging.Handler()
#         handler.handle = self.handle
#         self.__handled = False
#         # Bypass getLogger() and side-effects
#         logger = logging.getLoggerClass()(
#                 'multiprocessing.test.TestLoggingProcessName')
#         logger.addHandler(handler)
#         logger.propagate = False
#
#         logger.warn('foo')
#         assert self.__handled

#
# Check that Process.join() retries if os.waitpid() fails with EINTR
#
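
The NOTSET/root-logger fallback that test_level exercises can also be observed directly, without spawning a child process; a minimal sketch with illustrative level values:

import logging
import multiprocessing

logger = multiprocessing.get_logger()
logger.setLevel(logging.NOTSET)              # no explicit level on "multiprocessing"
logging.getLogger().setLevel(logging.WARNING)
print(logger.getEffectiveLevel())            # 30 -- inherited from the root logger

logger.setLevel(35)                          # an explicit level takes precedence again
print(logger.getEffectiveLevel())            # 35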
Project: oil    Author: oilshell    | Project source | File source
def test_main(run=None):
    if sys.platform.startswith("linux"):
        try:
            lock = multiprocessing.RLock()
        except OSError:
            raise unittest.SkipTest("OSError raises on RLock creation, see issue 3111!")

    check_enough_semaphores()

    if run is None:
        from test.test_support import run_unittest as run

    util.get_temp_dir()     # creates temp directory for use by all processes

    multiprocessing.get_logger().setLevel(LOG_LEVEL)

    ProcessesMixin.pool = multiprocessing.Pool(4)
    ThreadsMixin.pool = multiprocessing.dummy.Pool(4)
    ManagerMixin.manager.__init__()
    ManagerMixin.manager.start()
    ManagerMixin.pool = ManagerMixin.manager.Pool(4)

    testcases = (
        sorted(testcases_processes.values(), key=lambda tc:tc.__name__) +
        sorted(testcases_threads.values(), key=lambda tc:tc.__name__) +
        sorted(testcases_manager.values(), key=lambda tc:tc.__name__) +
        testcases_other
        )

    loadTestsFromTestCase = unittest.defaultTestLoader.loadTestsFromTestCase
    suite = unittest.TestSuite(loadTestsFromTestCase(tc) for tc in testcases)
    # (ncoghlan): Whether or not sys.exc_clear is executed by the threading
    # module during these tests is at least platform dependent and possibly
    # non-deterministic on any given platform. So we don't mind if the listed
    # warnings aren't actually raised.
    with test_support.check_py3k_warnings(
            (".+__(get|set)slice__ has been removed", DeprecationWarning),
            (r"sys.exc_clear\(\) not supported", DeprecationWarning),
            quiet=True):
        run(suite)

    ThreadsMixin.pool.terminate()
    ProcessesMixin.pool.terminate()
    ManagerMixin.pool.terminate()
    ManagerMixin.manager.shutdown()

    del ProcessesMixin.pool, ThreadsMixin.pool, ManagerMixin.pool
Project: python2-tracer    Author: extremecoders-re    | Project source | File source
def test_level(self):
        LEVEL1 = 32
        LEVEL2 = 37

        logger = multiprocessing.get_logger()
        root_logger = logging.getLogger()
        root_level = root_logger.level

        reader, writer = multiprocessing.Pipe(duplex=False)

        logger.setLevel(LEVEL1)
        p = self.Process(target=self._test_level, args=(writer,))
        p.daemon = True
        p.start()
        self.assertEqual(LEVEL1, reader.recv())

        logger.setLevel(logging.NOTSET)
        root_logger.setLevel(LEVEL2)
        p = self.Process(target=self._test_level, args=(writer,))
        p.daemon = True
        p.start()
        self.assertEqual(LEVEL2, reader.recv())

        root_logger.setLevel(root_level)
        logger.setLevel(level=LOG_LEVEL)


# class _TestLoggingProcessName(BaseTestCase):
#
#     def handle(self, record):
#         assert record.processName == multiprocessing.current_process().name
#         self.__handled = True
#
#     def test_logging(self):
#         handler = logging.Handler()
#         handler.handle = self.handle
#         self.__handled = False
#         # Bypass getLogger() and side-effects
#         logger = logging.getLoggerClass()(
#                 'multiprocessing.test.TestLoggingProcessName')
#         logger.addHandler(handler)
#         logger.propagate = False
#
#         logger.warn('foo')
#         assert self.__handled

#
# Check that Process.join() retries if os.waitpid() fails with EINTR
#
Project: python2-tracer    Author: extremecoders-re    | Project source | File source
def test_main(run=None):
    if sys.platform.startswith("linux"):
        try:
            lock = multiprocessing.RLock()
        except OSError:
            raise unittest.SkipTest("OSError raises on RLock creation, see issue 3111!")

    check_enough_semaphores()

    if run is None:
        from test.test_support import run_unittest as run

    util.get_temp_dir()     # creates temp directory for use by all processes

    multiprocessing.get_logger().setLevel(LOG_LEVEL)

    ProcessesMixin.pool = multiprocessing.Pool(4)
    ThreadsMixin.pool = multiprocessing.dummy.Pool(4)
    ManagerMixin.manager.__init__()
    ManagerMixin.manager.start()
    ManagerMixin.pool = ManagerMixin.manager.Pool(4)

    testcases = (
        sorted(testcases_processes.values(), key=lambda tc:tc.__name__) +
        sorted(testcases_threads.values(), key=lambda tc:tc.__name__) +
        sorted(testcases_manager.values(), key=lambda tc:tc.__name__) +
        testcases_other
        )

    loadTestsFromTestCase = unittest.defaultTestLoader.loadTestsFromTestCase
    suite = unittest.TestSuite(loadTestsFromTestCase(tc) for tc in testcases)
    # (ncoghlan): Whether or not sys.exc_clear is executed by the threading
    # module during these tests is at least platform dependent and possibly
    # non-deterministic on any given platform. So we don't mind if the listed
    # warnings aren't actually raised.
    with test_support.check_py3k_warnings(
            (".+__(get|set)slice__ has been removed", DeprecationWarning),
            (r"sys.exc_clear\(\) not supported", DeprecationWarning),
            quiet=True):
        run(suite)

    ThreadsMixin.pool.terminate()
    ProcessesMixin.pool.terminate()
    ManagerMixin.pool.terminate()
    ManagerMixin.manager.shutdown()

    del ProcessesMixin.pool, ThreadsMixin.pool, ManagerMixin.pool
Project: ouroboros    Author: pybee    | Project source | File source
def test_level(self):
        LEVEL1 = 32
        LEVEL2 = 37

        logger = multiprocessing.get_logger()
        root_logger = logging.getLogger()
        root_level = root_logger.level

        reader, writer = multiprocessing.Pipe(duplex=False)

        logger.setLevel(LEVEL1)
        p = self.Process(target=self._test_level, args=(writer,))
        p.daemon = True
        p.start()
        self.assertEqual(LEVEL1, reader.recv())

        logger.setLevel(logging.NOTSET)
        root_logger.setLevel(LEVEL2)
        p = self.Process(target=self._test_level, args=(writer,))
        p.daemon = True
        p.start()
        self.assertEqual(LEVEL2, reader.recv())

        root_logger.setLevel(root_level)
        logger.setLevel(level=LOG_LEVEL)


# class _TestLoggingProcessName(BaseTestCase):
#
#     def handle(self, record):
#         assert record.processName == multiprocessing.current_process().name
#         self.__handled = True
#
#     def test_logging(self):
#         handler = logging.Handler()
#         handler.handle = self.handle
#         self.__handled = False
#         # Bypass getLogger() and side-effects
#         logger = logging.getLoggerClass()(
#                 'multiprocessing.test.TestLoggingProcessName')
#         logger.addHandler(handler)
#         logger.propagate = False
#
#         logger.warn('foo')
#         assert self.__handled

#
# Check that Process.join() retries if os.waitpid() fails with EINTR
#