Python celery module: Celery() example source code

We extracted the following 49 code examples from open-source Python projects to illustrate how to use celery.Celery().
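
Before the project examples, here is a minimal, self-contained sketch of the basic pattern the snippets below build on (the module name and broker URL are assumed illustration values, not taken from any project listed here):

from celery import Celery

# Instantiate a Celery app pointing at an assumed local Redis broker.
app = Celery('demo', broker='redis://localhost:6379/0')

@app.task
def add(x, y):
    # A trivial task; start a worker (e.g. `celery -A demo worker`) to run it.
    return x + y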

Project: NodeDefender    Author: CTSNE    | Project source | File source
def CreateCelery(app = None):
    app = app or CreateApp()
    if not NodeDefender.config.celery.enabled():
        NodeDefender.logger.info("Concurrency disabled")
        return False

    try:
        celery = Celery(app.name, broker=app.config['CELERY_BROKER_URI'],
                        backend=app.config['CELERY_BACKEND_URI'])
    except KeyError:
        celery = Celery(app.name)
        NodeDefender.logger.info("Concurreny configuration error")
        return False

    celery.conf.update(app.config)
    TaskBase = celery.Task
    class ContextTask(TaskBase):
        abstract = True
        def __call__(self, *args, **kwargs):
            with app.app_context():
                return TaskBase.__call__(self, *args, **kwargs)
    celery.Task = ContextTask
    NodeDefender.logger.info("Concurrency successfully enabled")
    return celery
Project: flask-vue-example    Author: levi-lq    | Project source | File source
def make_celery(app):
    """
    desc:   Bind the Celery instance to the Flask app.

    """
    celery = Celery(app.import_name, broker=app.config["CELERY_BROKER_URL"],
                    backend=app.config["CELERY_BACKEND_URL"])
    celery.conf.update(app.config)
    TaskBase = celery.Task

    class ContextTask(TaskBase):
        abstract = True

        def __call__(self, *args, **kwargs):
            with app.app_context():
                return TaskBase.__call__(self, *args, **kwargs)

    celery.Task = ContextTask
    return celery
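
As a hedged usage sketch (configuration values assumed, not part of the project), the make_celery helper above would typically be wired up like this:

from flask import Flask

flask_app = Flask(__name__)
flask_app.config.update(
    CELERY_BROKER_URL='redis://localhost:6379/0',    # assumed example values
    CELERY_BACKEND_URL='redis://localhost:6379/1',
)
celery = make_celery(flask_app)

@celery.task
def add(x, y):
    # Runs inside flask_app.app_context() thanks to ContextTask.
    return x + y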
Project: akamatsu    Author: rmed    | Project source | File source
def make_celery(app):
    """Create a celery instance for the application."""
    # Celery is optional, import it here rather than globally
    from celery import Celery

    celery_instance = Celery(
        app.import_name,
        backend=app.config['CELERY_RESULT_BACKEND'],
        broker=app.config['CELERY_BROKER_URL']
    )

    celery_instance.conf.update(app.config)
    TaskBase = celery_instance.Task

    class ContextTask(TaskBase):
        abstract = True
        def __call__(self, *args, **kwargs):
            with app.app_context():
                return TaskBase.__call__(self, *args, **kwargs)

    celery_instance.Task = ContextTask

    return celery_instance
Project: docket    Author: rocknsm    | Project source | File source
def celery(self):
        app = self.flask_app
        celery = Celery(
                app.import_name,
                broker=app.config['CELERY_BROKER_URL'])

        celery.conf.update(app.config)

        TaskBase = celery.Task
        class ContextTask(TaskBase):
            abstract = True

            def __call__(self, *args, **kwargs):
                with app.app_context():
                    return TaskBase.__call__(self, *args, **kwargs)

        celery.Task = ContextTask

        return celery
Project: nanobox-adapter-libcloud    Author: nanobox-io    | Project source | File source
def make_celery(app):
    celery = Celery(app.import_name,
                    backend=app.config['CELERY_RESULT_BACKEND'],
                    broker=app.config['CELERY_BROKER_URL'])
    celery.conf.update(app.config)
    TaskBase = celery.Task

    class ContextTask(TaskBase):
        abstract = True

        def __call__(self, *args, **kwargs):
            with app.app_context():
                return TaskBase.__call__(self, *args, **kwargs)

    celery.Task = ContextTask
    return celery


# Instantiate Flask app
Project: twopi-flask-utils    Author: TwoPiCode    | Project source | File source
def create_celery(name, config_obj, inject_version=True, **kwargs):
    """Creates a celery app.

    :param config_obj: The configuration object to initialize with
    :param inject_version: bool: Whether or not to inject the application's
                                 version number. Attempts to get the version
                                 number using
                                 :func:`twopi_flask_utils.deployment_release.get_release`
    :param kwargs: Other arguments to pass to the ``Celery`` instantiation.
    :returns: An initialized celery application.
    """

    celery = Celery(name, broker=config_obj.CELERY_BROKER_URL, **kwargs)
    celery.config_from_object(config_obj)
    if inject_version:
        celery.version = get_release()

    return celery
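
A hedged usage sketch of create_celery (the config class and broker URL below are assumed for illustration):

class Config:
    CELERY_BROKER_URL = 'redis://localhost:6379/0'   # assumed example value

celery = create_celery('myapp', Config, inject_version=False)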
Project: apm-agent-python    Author: elastic    | Project source | File source
def flask_celery(flask_apm_client):
    from celery import Celery

    flask_app = flask_apm_client.app
    celery = Celery(flask_app.import_name, backend=None,
                    broker=None)
    celery.conf.update(CELERY_ALWAYS_EAGER=True)
    TaskBase = celery.Task

    class ContextTask(TaskBase):
        abstract = True

        def __call__(self, *args, **kwargs):
            with flask_app.app_context():
                return TaskBase.__call__(self, *args, **kwargs)

    celery.Task = ContextTask
    celery.flask_apm_client = flask_apm_client
    return celery
Project: exchange    Author: boundlessgeo    | Project source | File source
def test_celery(self):
        # get a Celery "connection"
        celery = Celery()

        # This is a test task that returns the number
        # passed into the function.
        @celery.task
        def mirror(x):
            return x

        # some number.
        test_n = 44
        # kick off the celery task
        r = mirror.apply(args=(test_n,)).get()
        # ensure the number comes back.
        self.assertEqual(r, test_n)
Project: infosec_mentors_project    Author: andMYhacks    | Project source | File source
def create_app(config_name):
    app = Flask(__name__)
    app.config.from_object(config_type[config_name])
    config_type[config_name].init_app(app)

    celery = Celery(app.name, broker=app.config['CELERY_BROKER_URL'])
    celery.conf.update(app.config)

    db.init_app(app)
    mail.init_app(app)
    bcrypt.init_app(app)
    login_manager.init_app(app)

    from .main import main as main_blueprint
    app.register_blueprint(main_blueprint)

    return app
Project: Mastering-Flask    Author: PacktPublishing    | Project source | File source
def make_celery(app):
    celery = Celery(
        app.import_name,
        broker=app.config['CELERY_BROKER_URL'],
        backend=app.config['CELERY_BACKEND_URL']
    )
    celery.conf.update(app.config)
    TaskBase = celery.Task

    class ContextTask(TaskBase):
        abstract = True

        def __call__(self, *args, **kwargs):
            with app.app_context():
                return TaskBase.__call__(self, *args, **kwargs)

    celery.Task = ContextTask

    return celery
Project: Mastering-Flask    Author: PacktPublishing    | Project source | File source
def make_celery(app):
    celery = Celery(
        app.import_name,
        broker=app.config['CELERY_BROKER_URL'],
        backend=app.config['CELERY_BACKEND_URL']
    )
    celery.conf.update(app.config)
    TaskBase = celery.Task

    class ContextTask(TaskBase):
        abstract = True

        def __call__(self, *args, **kwargs):
            with app.app_context():
                return TaskBase.__call__(self, *args, **kwargs)

    celery.Task = ContextTask

    return celery
Project: Mastering-Flask    Author: PacktPublishing    | Project source | File source
def make_celery(app):
    celery = Celery(
        app.import_name,
        broker=app.config['CELERY_BROKER_URL'],
        backend=app.config['CELERY_BACKEND_URL']
    )
    celery.conf.update(app.config)
    TaskBase = celery.Task

    class ContextTask(TaskBase):
        abstract = True

        def __call__(self, *args, **kwargs):
            with app.app_context():
                return TaskBase.__call__(self, *args, **kwargs)

    celery.Task = ContextTask

    return celery
Project: Mastering-Flask    Author: PacktPublishing    | Project source | File source
def make_celery(app):
    celery = Celery(
        app.import_name,
        broker=app.config['CELERY_BROKER_URL'],
        backend=app.config['CELERY_BACKEND_URL']
    )
    celery.conf.update(app.config)
    TaskBase = celery.Task

    class ContextTask(TaskBase):
        abstract = True

        def __call__(self, *args, **kwargs):
            with app.app_context():
                return TaskBase.__call__(self, *args, **kwargs)

    celery.Task = ContextTask

    return celery
Project: Mastering-Flask    Author: PacktPublishing    | Project source | File source
def make_celery(app):
    celery = Celery(
        app.import_name,
        broker=app.config['CELERY_BROKER_URL'],
        backend=app.config['CELERY_BACKEND_URL']
    )
    celery.conf.update(app.config)
    TaskBase = celery.Task

    class ContextTask(TaskBase):
        abstract = True

        def __call__(self, *args, **kwargs):
            with app.app_context():
                return TaskBase.__call__(self, *args, **kwargs)

    celery.Task = ContextTask

    return celery
Project: gSewa    Author: RazinDangol    | Project source | File source
def make_celery(app):
    celery = Celery(app.import_name, broker=app.config['CELERY_BROKER_URL'])
    celery.conf.update(app.config)
    TaskBase = celery.Task

    class ContextTask(TaskBase):
        abstract = True

        def __call__(self, *args, **kwargs):
            with app.app_context():
                return TaskBase.__call__(self, *args, **kwargs)
    celery.Task = ContextTask
    return celery



# Making SqlAlchemy instance of the app
Project: nixborg    Author: mayflower    | Project source | File source
def make_celery(app):
    celery = Celery(
        app.import_name,
        backend=app.config['NIXBORG_CELERY_RESULT_BACKEND'],
        broker=app.config['NIXBORG_CELERY_BROKER_URL'],
    )
    celery.conf.update(app.config)
    TaskBase = celery.Task

    class ContextTask(TaskBase):
        abstract = True
        def __call__(self, *args, **kwargs):
            with app.app_context():
                return TaskBase.__call__(self, *args, **kwargs)

    celery.Task = ContextTask

    return celery
Project: flask-blog    Author: ClayAndMore    | Project source | File source
def make_celery(app):
    celery = Celery(
        app.import_name,
        backend=app.config['CELERY_RESULT_BACKEND'],
        broker=app.config['CELERY_BROKER_URL']
    )

    celery.conf.update(app.config)
    TaskBase = celery.Task

    class ContextTask(TaskBase):
        abstract = True

        def __call__(self, *args, **kwargs):
            with app.app_context():
                return TaskBase.__call__(self, *args, **kwargs)

    celery.Task = ContextTask
    return celery
Project: restccnu    Author: restccnu    | Project source | File source
def make_celery(app):
    """
    :function: make_celery
    :args:
        - app: restccnu app
    :rv: celery instance

    Creates the Celery instance and pushes the Flask app context into the
    Celery task context, so tasks can use Flask extensions.
    """
    celery = Celery(app.import_name, broker=app.config['CELERY_BROKER_URL'])
    celery.conf.update(app.config)
    TaskBase = celery.Task
    class ContextTask(TaskBase):
        abstract = True  # abstract base class: not registered as a task itself
        def __call__(self, *args, **kwargs):
            with app.app_context():
                return TaskBase.__call__(self, *args, **kwargs)
    celery.Task = ContextTask
    return celery
Project: suite    Author: Staffjoy    | Project source | File source
def create_celery_app(app=None):
    """Return a celery app in app context"""
    app = app or create_app(
        os.environ.get("ENV", "prod"), register_blueprints=False)
    celery = Celery(
        app.import_name,
        backend=app.config['CELERY_RESULT_BACKEND'],
        broker=app.config['CELERY_BROKER_URL'])

    celery.conf.update(app.config)
    TaskBase = celery.Task

    class ContextTask(TaskBase):
        abstract = True

        def __call__(self, *args, **kwargs):
            with app.app_context():
                return TaskBase.__call__(self, *args, **kwargs)

    celery.Task = ContextTask
    return celery
Project: pylogctx    Author: peopledoc    | Project source | File source
def test_task():
    from pylogctx import context

    app = Celery(task_cls=LoggingTask)

    @app.task
    def my_task():
        context.update(taskField='RUNNED')
        logger = get_task_logger(current_task.name)
        logger.info("I log!")
        return context.as_dict()

    result = my_task.apply()
    if VERSION.major < 4:
        result.maybe_reraise()
    else:
        result.maybe_throw()
    fields = result.result
    assert 'taskField' in fields
    assert not context.as_dict()
Project: pylogctx    Author: peopledoc    | Project source | File source
def test_adapter():
    from pylogctx import context, log_adapter

    app = Celery(task_cls='pylogctx.celery.LoggingTask')

    @log_adapter(app.Task)
    def adapter(task):
        return {
            'celeryTaskId': task.request.id,
            'celeryTask': task.name
        }

    @app.task
    def my_task():
        return context.as_dict()

    result = my_task.apply()
    if VERSION.major < 4:
        result.maybe_reraise()
    else:
        result.maybe_throw()

    fields = result.result
    assert 'celeryTask' in fields
    assert 'celeryTaskId' in fields
Project: zeus    Author: getsentry    | Project source | File source
def init_app(self, app, sentry):
        self.app = app
        new_celery = celery.Celery(
            app.import_name,
            broker=app.config['CELERY_BROKER_URL'],
            backend=app.config['CELERY_RESULT_BACKEND'],
        )
        # XXX(dcramer): why the hell am I wasting time trying to make Celery work?
        self.celery.__dict__.update(vars(new_celery))
        self.celery.conf.update(app.config)

        worker_process_init.connect(self._worker_process_init)

        task_postrun.connect(self._task_postrun)
        task_prerun.connect(self._task_prerun)

        if sentry:
            register_signal(sentry.client)
            register_logger_signal(sentry.client)
Project: websauna    Author: websauna    | Project source | File source
def parse_celery_config(celery_config_python: str) -> dict:
    # Expose timedelta object for config to be used in beat schedule
    # http://docs.celeryproject.org/en/master/userguide/periodic-tasks.html#beat-entries
    from datetime import timedelta  # noqa
    from celery.schedules import crontab  # noqa

    _globals = globals().copy()
    _locals = locals().copy()
    code = textwrap.dedent(celery_config_python)

    try:
        config_dict = eval(code, _globals, _locals)
    except Exception as e:
        raise RuntimeError("Could not execute Python code to produce Celery configuration object: {}".format(code)) from e

    if "broker_url" not in config_dict:
        raise RuntimeError("Mandatory broker_url Celery setting missing. Did we fail to parse config? {}".format(config_dict))

    return config_dict
Project: websauna    Author: websauna    | Project source | File source
def get_celery_config(registry: Registry) -> dict:
    """Load Celery configuration from settings.

    You need to have a setting key ``celery_config_python``. This is Python code to configure Celery. The code is executed and all locals are passed to Celery app.

    More information:

        * http://docs.celeryproject.org/en/master/userguide/configuration.html

    :param registry: Pyramid registry from where we read the Celery configuration
    :return: An object holding Celery configuration variables
    """

    celery_config_python = registry.settings.get('websauna.celery_config')
    if not celery_config_python:
        raise RuntimeError('Using Celery with Websauna requires you to have celery_config_python configuration variable')

    return parse_celery_config(celery_config_python)
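
For illustration only (values and the task name are assumed, not from websauna), the ``websauna.celery_config`` setting is expected to be Python code that evaluates to a plain dict, which parse_celery_config() above dedents and eval()s with timedelta/crontab available:

example_config = '''{
    "broker_url": "redis://localhost:6379/3",
    "beat_schedule": {
        "hourly-cleanup": {
            "task": "myapp.tasks.cleanup",
            "schedule": timedelta(hours=1),
        },
    },
}'''

config = parse_celery_config(example_config)
assert config["broker_url"] == "redis://localhost:6379/3"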
Project: websauna    Author: websauna    | Project source | File source
def get_celery(registry: Registry):
    """Load and configure Celery app.

    Cache the loaded Celery app object on registry.

    :param registry: Use registry settings to load Celery
    """
    celery = getattr(registry, "celery", None)
    if not celery:
        celery = registry.celery = Celery()
        celery.conf.update(get_celery_config(registry))

        # Expose Pyramid registry to Celery app and tasks
        celery.registry = registry

    return celery
Project: news    Author: kuc2477    | Project source | File source
def update(self, schedule):
        """Update the registered schedule by reconciliating with database
        backend.

        :param schedule: Schedule to synchronize with the database.
        :type schedule: :class:`~news.models.AbstractSchedule` implementation

        """
        if isinstance(schedule, int):
            schedule = self.backend.get_schedule_by_id(schedule)

        # log
        self._log('Updating schedule {}'.format(
            schedule if isinstance(schedule, int) else schedule.id))

        # remove schedule from job queue and add it if it's now enabled
        self.remove_schedule(schedule)
        if schedule.enabled:
            self.add_schedule(schedule)

    # ==================
    # Celery integration
    # ==================
Project: fabric8-analytics-worker    Author: fabric8-analytics    | Project source | File source
def init_celery(app=None, result_backend=True):
    """Init Celery configuration.

    :param app: celery configuration, if omitted, application will be instantiated
    :param result_backend: True if Celery should connect to result backend
    """
    # Keep this for debugging purposes for now
    _logger.debug(">>> Selinon version is %s" % selinon_version)
    _logger.debug(">>> Selinonlib version is %s" % selinonlib_version)
    _logger.debug(">>> Celery version is %s" % celery_version)

    if not result_backend:
        CelerySettings.disable_result_backend()

    if app is None:
        app = Celery('tasks')
        app.config_from_object(CelerySettings)
    else:
        app.config_from_object(CelerySettings)
Project: jenova    Author: inova-tecnologias    | Project source | File source
def create_celery_app(app=None):
  app = app or create_app()
  celery = Celery(__name__, broker=app.config['CELERY_BROKER_URL'])
  celery.conf.update(app.config)
  TaskBase = celery.Task

  class ContextTask(TaskBase):
    abstract = True

    def __call__(self, *args, **kwargs):
      with app.app_context():
        return TaskBase.__call__(self, *args, **kwargs)

  celery.Task = ContextTask
  celery.app = app
  return celery
Project: Closed-Track-Scoring    Author: tpmullan    | Project source | File source
def ready(self):
        # Using a string here means the worker will not have to
        # pickle the object when using Windows.
        app.config_from_object('django.conf:settings')
        installed_apps = [app_config.name for app_config in apps.get_app_configs()]
        app.autodiscover_tasks(lambda: installed_apps, force=True)

        if hasattr(settings, 'RAVEN_CONFIG'):
            # Celery signal registration

            from raven import Client as RavenClient
            from raven.contrib.celery import register_signal as raven_register_signal
            from raven.contrib.celery import register_logger_signal as raven_register_logger_signal


            raven_client = RavenClient(dsn=settings.RAVEN_CONFIG['DSN'])
            raven_register_logger_signal(raven_client)
            raven_register_signal(raven_client)
Project: dramatiq    Author: Bogdanp    | Project source | File source
def parse_args():
    parser = argparse.ArgumentParser()
    parser.add_argument(
        "--benchmark", help="the benchmark to run",
        type=benchmark_arg, default="latency",
    )
    parser.add_argument(
        "--count", help="the number of messages to benchmark with",
        type=int, default=10000,
    )
    parser.add_argument(
        "--use-green-threads", help="run workers with green threads rather than system threads",
        action="store_true", default=False,
    )
    parser.add_argument(
        "--use-celery", help="run the benchmark under Celery",
        action="store_true", default=False,
    )
    return parser.parse_args()
Project: cookiecutter-django    Author: asyncee    | Project source | File source
def ready(self):
        # Using a string here means the worker will not have to
        # pickle the object when using Windows.
        app.config_from_object('django.conf:settings')
        installed_apps = [app_config.name for app_config in apps.get_app_configs()]
        app.autodiscover_tasks(lambda: installed_apps, force=True)

        {% if cookiecutter.use_sentry_for_error_reporting == 'y' -%}
        if hasattr(settings, 'RAVEN_CONFIG'):
            # Celery signal registration
            from raven import Client as RavenClient
            from raven.contrib.celery import register_signal as raven_register_signal
            from raven.contrib.celery import register_logger_signal as raven_register_logger_signal

            raven_client = RavenClient(dsn=settings.RAVEN_CONFIG['DSN'])
            raven_register_logger_signal(raven_client)
            raven_register_signal(raven_client)
        {%- endif %}
Project: kostyor    Author: Mirantis    | Project source | File source
def create_app(conf):
    app = celery.Celery()

    for option, value in conf.rpc.items():
        # celery options are upper-cased, just like in flask
        app.conf[option.upper()] = value

    # Each driver may use own set of Celery tasks, so in order to run them
    # the Celery worker has to load them into memory. That's why we need to
    # iterate over Kostyor drivers and lazily load them into memory. The
    # important note here is that we can't use stevedore because it tries
    # to load found module immediately which leads to cyclic import error
    # (drivers import a celery app in order to define own tasks).
    namespaces = (
        'kostyor.upgrades.drivers',
        'kostyor.discovery_drivers',
    )

    for namespace in namespaces:
        for ep in pkg_resources.iter_entry_points(namespace):
            package, module = ep.module_name.rsplit('.', 1)
            app.autodiscover_tasks([package], module)

    return app
Project: cs4300sp2017-finalproject    Author: AN313    | Project source | File source
def make_celery(app):
    celery = Celery(
        app.import_name,
        backend=app.config['CELERY_RESULT_BACKEND'],
        broker=app.config['CELERY_BROKER_URL']
    )
    celery.conf.update(app.config)
    TaskBase = celery.Task

    class ContextTask(TaskBase):
        abstract = True

        def __call__(self, *args, **kwargs):
            with app.app_context():
                return TaskBase.__call__(self, *args, **kwargs)
    celery.Task = ContextTask
    return celery
Project: defplorex    Author: trendmicro    | Project source | File source
def _setup_logging(loglevel=logging.WARN, **kwargs):
    from loggers import config_logger
    config_logger(level=loglevel)

    log.debug(
            'Configuring Celery on broker = %s,'
            ' result_backend = %s, timezone = %s',
            broker_url, result_backend, timezone)
Project: meg-server    Author: Argonne-National-Laboratory    | Project source | File source
def setup(self):
        self.celery = Celery()
        dirname = os.path.dirname(__file__)
        self.cfg = YAMLConfigFile(os.path.join(dirname, "../config.default.yml"))
Project: meg-server    Author: Argonne-National-Laboratory    | Project source | File source
def create_and_configure_celery(app):
    broker_url = "sqla+{}".format(app.config["SQLALCHEMY_DATABASE_URI"])
    result_backend = broker_url.replace("sqla+", "db+")
    celery = Celery("tasks", backend=result_backend, broker=broker_url)
    return celery
Project: django-tmpl    Author: jarrekk    | Project source | File source
def shared_task_email(func):
    """
    Replacement for @shared_task decorator that emails admins if an exception is raised.
    """
    @wraps(func)
    def new_func(*args, **kwargs):
        try:
            return func(*args, **kwargs)
        except:
            subject = "Celery task failure"
            message = traceback.format_exc()
            mail_admins(subject, message)
            raise
    return shared_task(new_func)
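
A hedged usage sketch (task name and body assumed) of the decorator above:

@shared_task_email
def generate_report(report_id):
    # Any exception raised here is emailed to the Django admins and then re-raised.
    return 'report-{}'.format(report_id)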
Project: girder_worker    Author: girder    | Project source | File source
def check_celery_version(*args, **kwargs):
    if LooseVersion(__version__) < LooseVersion('4.0.0'):
        sys.exit("""You are running Celery {}.

girder-worker requires celery>=4.0.0""".format(__version__))
Project: zinc    Author: PressLabs    | Project source | File source
def _configure_sentry(self, raven_config):
        import raven
        from raven.contrib.celery import (register_signal,
                                          register_logger_signal)
        client = raven.Client(**raven_config)

        # register a custom filter to filter out duplicate logs
        register_logger_signal(client)

        # hook into the Celery error handler
        register_signal(client)
Project: talisker    Author: canonical-ols    | Project source | File source
def celery_app():
    # reregister all the signals and sentry clients
    talisker.celery.enable_signals()
    celery.signals.before_task_publish.connect(before_task_publish)
    celery.signals.task_prerun.connect(task_prerun)

    yield celery.Celery(broker='memory://localhost/')

    celery.signals.before_task_publish.disconnect(before_task_publish)
    celery.signals.task_prerun.disconnect(task_prerun)
    talisker.celery.disable_signals()
Project: cookiecutter-django-reactjs    Author: genomics-geek    | Project source | File source
def ready(self):
        # Using a string here means the worker will not have to
        # pickle the object when using Windows.
        app.config_from_object('django.conf:settings')
        installed_apps = [app_config.name for app_config in apps.get_app_configs()]
        app.autodiscover_tasks(lambda: installed_apps, force=True)

        {% if cookiecutter.use_sentry_for_error_reporting == 'y' -%}
        if hasattr(settings, 'RAVEN_CONFIG'):
            # Celery signal registration
Project: drift    Author: dgnorth    | Project source | File source
def make_celery(app):

    kombu.serialization.register(
        'drift_celery_json', 
        drift_celery_dumps, drift_celery_loads, 
        content_type='application/x-myjson', content_encoding='utf-8'
    ) 

    celery = Celery(app.import_name)

    # if BROKER_URL is not set use the redis server
    BROKER_URL = app.config.get("BROKER_URL", None)
    if not BROKER_URL:
        BROKER_URL = "redis://{}:6379/{}".format(app.config.get("redis_server"), CELERY_DB_NUMBER)
        log.info("Using redis for celery broker: %s", BROKER_URL)
    else:
        log.info("celery broker set in config: %s", BROKER_URL)

    celery.conf.update(app.config)
    celery.conf["BROKER_URL"] = BROKER_URL
    celery.conf["CELERY_RESULT_BACKEND"] = BROKER_URL
    celery.conf["CELERY_TASK_SERIALIZER"] = "drift_celery_json"
    celery.conf["CELERY_RESULT_SERIALIZER"] = "drift_celery_json"
    celery.conf["CELERY_ACCEPT_CONTENT"] = ["drift_celery_json"]
    celery.conf["CELERY_ENABLE_UTC"] = True
    TaskBase = celery.Task

    class ContextTask(TaskBase):
        abstract = True

        def __call__(self, *args, **kwargs):
            with app.app_context():
                return TaskBase.__call__(self, *args, **kwargs)
    celery.Task = ContextTask
    return celery


# custom json encoder for datetime object serialization
# from http://stackoverflow.com/questions/21631878/celery-is-there-a-way-to-write-custom-json-encoder-decoder
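
For illustration, a minimal sketch (assumed, not the drift project's actual implementation) of the kind of datetime-aware JSON dumps/loads pair that could be registered with kombu as shown above:

import json
from datetime import datetime

def drift_celery_dumps(obj):
    # Encode datetimes as ISO 8601 strings; everything else falls back to str().
    return json.dumps(obj, default=lambda o: o.isoformat()
                      if isinstance(o, datetime) else str(o))

def drift_celery_loads(data):
    # Plain JSON load; a fuller version would also turn ISO strings back
    # into datetime objects.
    return json.loads(data)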
Project: PrivacyScore    Author: PrivacyScore    | Project source | File source
def getBroker(module):
    app = Celery(module, broker=config.CELERY_BROKER, backend=config.CELERY_BACKEND)
    app.conf.update(
        task_serializer='json',
        accept_content=['json'],  # Ignore other content
        result_serializer='json',
        timezone='Europe/Berlin',
        enable_utc=True,
    )
    app.conf.task_routes = {
        'scanner.scan_connector.scan_site':  {'queue': 'scan-browser'},
        'scanner.db_connector.*':            {'queue': 'db-mongo-access'},
        'scanner.externaltests_connector.*': {'queue': 'scan-external'}
    }
    return app
Project: demo    Author: selinon    | Project source | File source
def init(with_result_backend=False):
    """ Init Celery and Selinon

    :param with_result_backend: true if the application should connect to the result backend
    :return: Celery application instance
    """
    conf = {
        'broker_url': os.environ.get('BROKER_URL', 'amqp://broker:5672'),
    }

    if with_result_backend:
        conf['result_backend'] = os.environ.get('RESULT_BACKEND_URL', 'redis://redis:6379/0')

    app = Celery('myapp')
    app.config_from_object(conf)

    flow_definition_files = []
    # Add all config files for flows
    for conf_file in os.listdir(os.path.join(_BASE_NAME, 'flows')):
        if conf_file.endswith('.yaml') and not conf_file.startswith('.'):
            flow_definition_files.append(os.path.join(_BASE_NAME, 'flows', conf_file))

    # Set Selinon configuration
    Config.set_config_yaml(os.path.join(_BASE_NAME, 'nodes.yaml'), flow_definition_files)
    # Prepare Celery
    Config.set_celery_app(app)

    return app
Project: flask_ishuhui    Author: lufficc    | Project source | File source
def create_celery(app):
    celery = Celery(app.import_name, backend=app.config['CELERY_RESULT_BACKEND'], broker=app.config['CELERY_BROKER_URL'])
    celery.conf.update(app.config)
    return celery
Project: deven    Author: b3h3rkz    | Project source | File source
def ready(self):
        # Using a string here means the worker will not have to
        # pickle the object when using Windows.
        app.config_from_object('django.conf:settings')
        app.autodiscover_tasks(lambda: settings.INSTALLED_APPS, force=True)

        if hasattr(settings, 'RAVEN_CONFIG'):
            # Celery signal registration
            from raven import Client as RavenClient
            from raven.contrib.celery import register_signal as raven_register_signal
            from raven.contrib.celery import register_logger_signal as raven_register_logger_signal

            raven_client = RavenClient(dsn=settings.RAVEN_CONFIG['DSN'])
            raven_register_logger_signal(raven_client)
            raven_register_signal(raven_client)
Project: JmilkFan-s-Blog    Author: JmilkFan    | Project source | File source
def make_celery(app):
    """Create the celery process."""

    # Init the celery object via app's configuration.
    celery = Celery(
        app.import_name,
        backend=app.config['CELERY_RESULT_BACKEND'],
        broker=app.config['CELERY_BROKER_URL'])

    # Flask-Celery-Helper to auto-setup the config.
    celery.conf.update(app.config)
    TaskBase = celery.Task

    class ContextTask(TaskBase):

        abstract = True

        def __call__(self, *args, **kwargs):
            """Will be execute when create the instance object of ContextTesk.
            """

            # Will context(Flask's Extends) of app object(Producer Sit)
            # be included in celery object(Consumer Site).
            with app.app_context():
                return TaskBase.__call__(self, *args, **kwargs)

    # Attach the app context to celery.Task so that other Flask
    # extensions can be used normally inside tasks.
    celery.Task = ContextTask
    return celery
Project: guides-cms    Author: pluralsight    | Project source | File source
def make_celery(app):
    celery = Celery(app.import_name, broker=app.config['CELERY_BROKER_URL'])
    celery.conf.update(app.config)
    TaskBase = celery.Task

    class ContextTask(TaskBase):
        abstract = True
        def __call__(self, *args, **kwargs):
            with app.app_context():
                return TaskBase.__call__(self, *args, **kwargs)

    celery.Task = ContextTask
    return celery
Project: flask_example    Author: flyhigher139    | Project source | File source
def make_celery(app):
    celery = Celery(app.import_name, broker=app.config['BROKER_URL'])
    celery.conf.update(app.config)
    TaskBase = celery.Task
    class ContextTask(TaskBase):
        abstract = True
        def __call__(self, *args, **kwargs):
            with app.app_context():
                return TaskBase.__call__(self, *args, **kwargs)
    celery.Task = ContextTask
    return celery