Python daemon module: pidfile() code examples

The following 14 code examples, extracted from open-source Python projects, illustrate how to use daemon.pidfile().
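
Before the project samples, here is a minimal, self-contained sketch of the basic pattern (the path /tmp/example.pid is a placeholder): daemon.pidfile provides PIDLockFile and TimeoutPIDLockFile, either of which is passed as the pidfile argument of daemon.DaemonContext so that only one instance of the program runs at a time.

import time

import daemon
import daemon.pidfile

def work():
    # Placeholder payload: the daemonized process just sleeps in a loop.
    while True:
        time.sleep(60)

# The PID file is acquired on entry and released (and removed) on exit,
# so a second invocation cannot daemonize while the first is running.
with daemon.DaemonContext(pidfile=daemon.pidfile.PIDLockFile('/tmp/example.pid')):
    work()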

Project: incubator-airflow-old | Author: apache
def kerberos(args):  # noqa
    print(settings.HEADER)
    import airflow.security.kerberos

    if args.daemon:
        pid, stdout, stderr, log_file = setup_locations("kerberos", args.pid, args.stdout, args.stderr, args.log_file)
        stdout = open(stdout, 'w+')
        stderr = open(stderr, 'w+')

        ctx = daemon.DaemonContext(
            pidfile=TimeoutPIDLockFile(pid, -1),
            stdout=stdout,
            stderr=stderr,
        )

        with ctx:
            airflow.security.kerberos.run()

        stdout.close()
        stderr.close()
    else:
        airflow.security.kerberos.run()
Project: germqtt | Author: openstack-infra
def main():
    args = get_options()
    config = ConfigParser.ConfigParser()
    config.read(args.conffile)

    if config.has_option('default', 'pidfile'):
        pid_fn = os.path.expanduser(config.get('default', 'pidfile'))
    else:
        pid_fn = '/var/run/germqtt.pid'

    if args.foreground:
        _main(args, config)
    else:
        pid = pid_file_module.TimeoutPIDLockFile(pid_fn, 10)
        with daemon.DaemonContext(pidfile=pid):
            _main(args, config)
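
A note on the timeout argument used above and in the airflow examples: TimeoutPIDLockFile(path, acquire_timeout) waits up to acquire_timeout seconds to acquire the lock, while a negative value (the -1 airflow passes) raises lockfile.AlreadyLocked immediately if another process already holds the PID file. A minimal sketch, assuming a scratch path /tmp/example.pid:

import daemon
from daemon.pidfile import TimeoutPIDLockFile

# Wait up to 10 seconds for the lock to become free (germqtt style).
patient = TimeoutPIDLockFile('/tmp/example.pid', 10)

# Fail fast: a negative timeout raises lockfile.AlreadyLocked at once
# if the PID file is already held (the airflow style seen below).
impatient = TimeoutPIDLockFile('/tmp/example.pid', -1)

with daemon.DaemonContext(pidfile=impatient):
    pass  # daemonized work goes here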
Project: thunderbolt100k | Author: cuyu
def main():
    parser = argparse.ArgumentParser(formatter_class=argparse.RawDescriptionHelpFormatter,
                                     description='A cmdline tool to enhance PL9K theme for ZSH\n')
    subparsers = parser.add_subparsers(help='Use {subcommand} -h for each subcommand\'s optional arguments details',
                                       dest='command')
    subparsers.add_parser('init', help='Init the settings in `~/.zshrc` file')
    subparsers.add_parser('polling', help='Start the polling daemon process')
    display_parser = subparsers.add_parser('display', help='Print the corresponding info on the terminal')
    display_parser.add_argument('widget', help='The widget to display, e.g. weather')

    args = parser.parse_args()
    if args.command == 'polling':
        with daemon.DaemonContext(pidfile=daemon.pidfile.PIDLockFile(constants.PID_PATH)):
            polling()
    elif args.command == 'display':
        widgets = sys.modules['thunderbolt100k.widgets']
        assert hasattr(widgets, args.widget), 'There is no widget called {0}'.format(args.widget)
        assert hasattr(getattr(widgets, args.widget),
                       'display'), 'The widget {0} must contain a `display` method'.format(args.widget)
        result = getattr(widgets, args.widget).display()
        if result:
            print(result)
    elif args.command == 'init':
        init_zshrc()
Project: airflow | Author: apache-airflow
def kerberos(args):  # noqa
    print(settings.HEADER)
    import airflow.security.kerberos

    if args.daemon:
        pid, stdout, stderr, log_file = setup_locations("kerberos", args.pid, args.stdout, args.stderr, args.log_file)
        stdout = open(stdout, 'w+')
        stderr = open(stderr, 'w+')

        ctx = daemon.DaemonContext(
            pidfile=TimeoutPIDLockFile(pid, -1),
            stdout=stdout,
            stderr=stderr,
        )

        with ctx:
            airflow.security.kerberos.run()

        stdout.close()
        stderr.close()
    else:
        airflow.security.kerberos.run()
Project: incubator-airflow-old | Author: apache
def scheduler(args):
    print(settings.HEADER)
    job = jobs.SchedulerJob(
        dag_id=args.dag_id,
        subdir=process_subdir(args.subdir),
        run_duration=args.run_duration,
        num_runs=args.num_runs,
        do_pickle=args.do_pickle)

    if args.daemon:
        pid, stdout, stderr, log_file = setup_locations("scheduler", args.pid, args.stdout, args.stderr, args.log_file)
        handle = setup_logging(log_file)
        stdout = open(stdout, 'w+')
        stderr = open(stderr, 'w+')

        ctx = daemon.DaemonContext(
            pidfile=TimeoutPIDLockFile(pid, -1),
            files_preserve=[handle],
            stdout=stdout,
            stderr=stderr,
        )
        with ctx:
            job.run()

        stdout.close()
        stderr.close()
    else:
        signal.signal(signal.SIGINT, sigint_handler)
        signal.signal(signal.SIGTERM, sigint_handler)
        signal.signal(signal.SIGQUIT, sigquit_handler)
        job.run()
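
The scheduler and worker examples also pass files_preserve: daemonization closes every open file descriptor by default, which would sever the logging handler set up before the fork, so the handle returned by setup_logging is listed to keep it open. A minimal sketch, with a hypothetical log path:

import logging

import daemon

handler = logging.FileHandler('/tmp/example.log')  # hypothetical path
logging.getLogger().addHandler(handler)

# files_preserve exempts the listed file objects (or raw descriptors)
# from the close-all-files step of daemonization.
with daemon.DaemonContext(files_preserve=[handler.stream]):
    logging.warning('still reaches /tmp/example.log after the fork')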
Project: incubator-airflow-old | Author: apache
def flower(args):
    broka = conf.get('celery', 'BROKER_URL')
    address = '--address={}'.format(args.hostname)
    port = '--port={}'.format(args.port)
    api = ''
    if args.broker_api:
        api = '--broker_api=' + args.broker_api

    flower_conf = ''
    if args.flower_conf:
        flower_conf = '--conf=' + args.flower_conf

    if args.daemon:
        pid, stdout, stderr, log_file = setup_locations("flower", args.pid, args.stdout, args.stderr, args.log_file)
        stdout = open(stdout, 'w+')
        stderr = open(stderr, 'w+')

        ctx = daemon.DaemonContext(
            pidfile=TimeoutPIDLockFile(pid, -1),
            stdout=stdout,
            stderr=stderr,
        )

        with ctx:
            os.execvp("flower", ['flower', '-b', broka, address, port, api, flower_conf])

        stdout.close()
        stderr.close()
    else:
        signal.signal(signal.SIGINT, sigint_handler)
        signal.signal(signal.SIGTERM, sigint_handler)

        os.execvp("flower", ['flower', '-b', broka, address, port, api, flower_conf])
Project: Needl | Author: eth0izzle
def main():
    parser = argparse.ArgumentParser(description=needl.__description__)
    parser.add_argument('--datadir', default=os.getcwd() + '/data', help='Data directory')
    parser.add_argument('-d', '--daemon', action='store_true', help='Run as a daemon')
    parser.add_argument('-v', '--verbose', action='store_true', help='Increase logging')
    parser.add_argument('--logfile', type=argparse.FileType('a'), default=sys.stdout, help='Log to this file. Default is stdout.')
    parser.add_argument('--pidfile', default='/tmp/needl.pid', help='Save process PID to this file. Default is /tmp/needl.pid. Only valid when running as a daemon.')
    args = parser.parse_args()

    if args.daemon and args.logfile is sys.stdout:
        args.logfile = open('/tmp/needl.log', 'a')

    needl.init(args)
    daemonize(args.logfile, args.pidfile) if args.daemon else start()
Project: Needl | Author: eth0izzle
def daemonize(logfile, pidfile):
    needl.log.info('Daemonizing and logging to %s', logfile)

    with daemon.DaemonContext(working_directory=os.getcwd(),
                              stderr=logfile,
                              umask=0o002,
                              pidfile=daemon.pidfile.PIDLockFile(pidfile)):
        start()
Project: autolux | Author: autolume
def run():
  opts.load_options()

  if opts.VIZ_LUMA_MAP:
    import luma_vis
    luma_vis.visualize(models.LUMA_FILE)
  else:
    opts.print_config()
    models.load_luma_observations()
    models.load_luma_map(models.LUMA_FILE)

    if opts.RUN_AS_DAEMON:
        import os
        try:
            import daemon
            import daemon.pidfile
        except ImportError:
            print("MISSING DAEMON MODULE. PLEASE INSTALL PYTHON-DAEMON TO ENABLE DAEMON MODE")
            import sys
            sys.exit(1)

        uid = os.getuid()
        lock_file = os.path.join(os.path.sep, "tmp", "autolux.%s.pid" % uid)
        print("RUNNING IN DAEMON MODE")
        print("LOCKFILE", lock_file)
        with daemon.DaemonContext(pidfile=daemon.pidfile.PIDLockFile(lock_file)):
            monitor_luma()
    else:
        monitor_luma()
Project: airflow | Author: apache-airflow
def scheduler(args):
    print(settings.HEADER)
    job = jobs.SchedulerJob(
        dag_id=args.dag_id,
        subdir=process_subdir(args.subdir),
        run_duration=args.run_duration,
        num_runs=args.num_runs,
        do_pickle=args.do_pickle)

    if args.daemon:
        pid, stdout, stderr, log_file = setup_locations("scheduler", args.pid, args.stdout, args.stderr, args.log_file)
        handle = setup_logging(log_file)
        stdout = open(stdout, 'w+')
        stderr = open(stderr, 'w+')

        ctx = daemon.DaemonContext(
            pidfile=TimeoutPIDLockFile(pid, -1),
            files_preserve=[handle],
            stdout=stdout,
            stderr=stderr,
        )
        with ctx:
            job.run()

        stdout.close()
        stderr.close()
    else:
        signal.signal(signal.SIGINT, sigint_handler)
        signal.signal(signal.SIGTERM, sigint_handler)
        signal.signal(signal.SIGQUIT, sigquit_handler)
        job.run()
Project: airflow | Author: apache-airflow
def flower(args):
    broka = conf.get('celery', 'BROKER_URL')
    address = '--address={}'.format(args.hostname)
    port = '--port={}'.format(args.port)
    api = ''
    if args.broker_api:
        api = '--broker_api=' + args.broker_api

    flower_conf = ''
    if args.flower_conf:
        flower_conf = '--conf=' + args.flower_conf

    if args.daemon:
        pid, stdout, stderr, log_file = setup_locations("flower", args.pid, args.stdout, args.stderr, args.log_file)
        stdout = open(stdout, 'w+')
        stderr = open(stderr, 'w+')

        ctx = daemon.DaemonContext(
            pidfile=TimeoutPIDLockFile(pid, -1),
            stdout=stdout,
            stderr=stderr,
        )

        with ctx:
            os.execvp("flower", ['flower', '-b', broka, address, port, api, flower_conf])

        stdout.close()
        stderr.close()
    else:
        signal.signal(signal.SIGINT, sigint_handler)
        signal.signal(signal.SIGTERM, sigint_handler)

        os.execvp("flower", ['flower', '-b', broka, address, port, api, flower_conf])
Project: zoneminder-slack-bot | Author: rjclark
def start(self):
        """
        If configured, converts to a daemon. Otherwise start connected to the current console.
        """

        run_as_daemon = self.config.getboolean('Runtime', 'daemon', fallback=False)
        uid = self.config.get('Runtime', 'daemon user', fallback=None)
        gid = self.config.get('Runtime', 'daemon group', fallback=None)
        pidfile = self.config.get('Runtime', 'daemon pid file', fallback=None)

        if uid:
            uid = getpwnam(uid).pw_uid
        if gid:
            gid = getgrnam(gid).gr_gid

        if run_as_daemon:
            LOGGER.info('Starting as a daemon process')
            import daemon
            import daemon.pidfile

            if pidfile:
                # We need to create the enclosing directory (if it does not exist) before
                # starting the daemon context as we might lose the permissions to create
                # that directory
                piddir = os.path.dirname(pidfile)
                if not os.path.isdir(piddir):
                    os.makedirs(piddir, 0o0755)
                    os.chown(piddir, uid, gid)

                # Convert the filename to the appropriate type for the daemon module.
                pidfile = daemon.pidfile.TimeoutPIDLockFile(pidfile)

            with daemon.DaemonContext(uid=uid, gid=gid, pidfile=pidfile):
                self._start()

        else:
            self._start()
Project: incubator-airflow-old | Author: apache
def worker(args):
    env = os.environ.copy()
    env['AIRFLOW_HOME'] = settings.AIRFLOW_HOME

    # Celery worker
    from airflow.executors.celery_executor import app as celery_app
    from celery.bin import worker

    worker = worker.worker(app=celery_app)
    options = {
        'optimization': 'fair',
        'O': 'fair',
        'queues': args.queues,
        'concurrency': args.concurrency,
        'hostname': args.celery_hostname,
    }

    if args.daemon:
        pid, stdout, stderr, log_file = setup_locations("worker", args.pid, args.stdout, args.stderr, args.log_file)
        handle = setup_logging(log_file)
        stdout = open(stdout, 'w+')
        stderr = open(stderr, 'w+')

        ctx = daemon.DaemonContext(
            pidfile=TimeoutPIDLockFile(pid, -1),
            files_preserve=[handle],
            stdout=stdout,
            stderr=stderr,
        )
        with ctx:
            sp = subprocess.Popen(['airflow', 'serve_logs'], env=env, close_fds=True)
            worker.run(**options)
            sp.kill()

        stdout.close()
        stderr.close()
    else:
        signal.signal(signal.SIGINT, sigint_handler)
        signal.signal(signal.SIGTERM, sigint_handler)

        sp = subprocess.Popen(['airflow', 'serve_logs'], env=env, close_fds=True)

        worker.run(**options)
        sp.kill()
Project: airflow | Author: apache-airflow
def worker(args):
    env = os.environ.copy()
    env['AIRFLOW_HOME'] = settings.AIRFLOW_HOME

    # Celery worker
    from airflow.executors.celery_executor import app as celery_app
    from celery.bin import worker

    worker = worker.worker(app=celery_app)
    options = {
        'optimization': 'fair',
        'O': 'fair',
        'queues': args.queues,
        'concurrency': args.concurrency,
    }

    if args.daemon:
        pid, stdout, stderr, log_file = setup_locations("worker", args.pid, args.stdout, args.stderr, args.log_file)
        handle = setup_logging(log_file)
        stdout = open(stdout, 'w+')
        stderr = open(stderr, 'w+')

        ctx = daemon.DaemonContext(
            pidfile=TimeoutPIDLockFile(pid, -1),
            files_preserve=[handle],
            stdout=stdout,
            stderr=stderr,
        )
        with ctx:
            sp = subprocess.Popen(['airflow', 'serve_logs'], env=env)
            worker.run(**options)
            sp.kill()

        stdout.close()
        stderr.close()
    else:
        signal.signal(signal.SIGINT, sigint_handler)
        signal.signal(signal.SIGTERM, sigint_handler)

        sp = subprocess.Popen(['airflow', 'serve_logs'], env=env)

        worker.run(**options)
        sp.kill()