Python django.db module: DatabaseError() code examples

The following 49 code examples, extracted from open-source Python projects, illustrate how to use django.db.DatabaseError().
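Before the project examples, here is a minimal hedged sketch of the pattern most of them share: wrap a write in transaction.atomic() and treat django.db.DatabaseError as the catch-all for backend failures. The function name and the idea of returning a boolean are illustrative assumptions, not taken from any of the projects below.

from django.db import DatabaseError, transaction

def save_or_ignore(instance):
    """Minimal sketch: persist a model instance, treating any backend failure as non-fatal."""
    try:
        with transaction.atomic():
            instance.save()
    except DatabaseError:
        # Covers IntegrityError, OperationalError and the other backend errors,
        # all of which subclass DatabaseError in Django.
        return False
    return True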

Project: NarshaTech    Author: KimJangHyeon    | project source | file source
def save(self, must_create=False):
        """
        Saves the current session data to the database. If 'must_create' is
        True, a database error will be raised if the saving operation doesn't
        create a *new* entry (as opposed to possibly updating an existing
        entry).
        """
        if self.session_key is None:
            return self.create()
        data = self._get_session(no_load=must_create)
        obj = self.create_model_instance(data)
        using = router.db_for_write(self.model, instance=obj)
        try:
            with transaction.atomic(using=using):
                obj.save(force_insert=must_create, force_update=not must_create, using=using)
        except IntegrityError:
            if must_create:
                raise CreateError
            raise
        except DatabaseError:
            if not must_create:
                raise UpdateError
            raise
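A hedged sketch of how a caller might consume this backend: CreateError and UpdateError are the session layer's translations of the IntegrityError and DatabaseError branches shown above. The import paths assume Django's bundled database session backend; the helper function is an illustrative assumption.

from django.contrib.sessions.backends.base import CreateError, UpdateError
from django.contrib.sessions.backends.db import SessionStore

def save_session(store, must_create=False):
    """Sketch of a caller handling the two session-level errors raised above."""
    try:
        store.save(must_create=must_create)
    except CreateError:
        # must_create=True and the chosen key already exists: cycle a new key.
        store.create()
    except UpdateError:
        # must_create=False but the row is gone (e.g. it expired concurrently).
        store.create()

save_session(SessionStore())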
Project: Scrum    Author: prakharchoudhary    | project source | file source
def save(self, must_create=False):
        """
        Saves the current session data to the database. If 'must_create' is
        True, a database error will be raised if the saving operation doesn't
        create a *new* entry (as opposed to possibly updating an existing
        entry).
        """
        if self.session_key is None:
            return self.create()
        data = self._get_session(no_load=must_create)
        obj = self.create_model_instance(data)
        using = router.db_for_write(self.model, instance=obj)
        try:
            with transaction.atomic(using=using):
                obj.save(force_insert=must_create, force_update=not must_create, using=using)
        except IntegrityError:
            if must_create:
                raise CreateError
            raise
        except DatabaseError:
            if not must_create:
                raise UpdateError
            raise
Project: django-livesettings3    Author: kunaldeo    | project source | file source
def _safe_get_siteid(site):
    global is_site_initializing, is_first_warn
    if not site:
        try:
            site = Site.objects.get_current()
            siteid = site.id
        except Exception as e:
            if is_site_initializing and isinstance(e, DatabaseError) and str(e).find('django_site') > -1:
                if is_first_warn:
                    log.warn(str(e).strip())
                    is_first_warn = False
                log.warn('Can not get siteid; probably before syncdb; ROLLBACK')
                connection._rollback()
            else:
                is_site_initializing = False
            siteid = settings.SITE_ID
        else:
            is_site_initializing = False
    else:
        siteid = site.id
    return siteid
Project: django    Author: alexsukhrin    | project source | file source
def save(self, must_create=False):
        """
        Saves the current session data to the database. If 'must_create' is
        True, a database error will be raised if the saving operation doesn't
        create a *new* entry (as opposed to possibly updating an existing
        entry).
        """
        if self.session_key is None:
            return self.create()
        data = self._get_session(no_load=must_create)
        obj = self.create_model_instance(data)
        using = router.db_for_write(self.model, instance=obj)
        try:
            with transaction.atomic(using=using):
                obj.save(force_insert=must_create, force_update=not must_create, using=using)
        except IntegrityError:
            if must_create:
                raise CreateError
            raise
        except DatabaseError:
            if not must_create:
                raise UpdateError
            raise
Project: Gypsy    Author: benticarlos    | project source | file source
def save(self, must_create=False):
        """
        Saves the current session data to the database. If 'must_create' is
        True, a database error will be raised if the saving operation doesn't
        create a *new* entry (as opposed to possibly updating an existing
        entry).
        """
        if self.session_key is None:
            return self.create()
        data = self._get_session(no_load=must_create)
        obj = self.create_model_instance(data)
        using = router.db_for_write(self.model, instance=obj)
        try:
            with transaction.atomic(using=using):
                obj.save(force_insert=must_create, force_update=not must_create, using=using)
        except IntegrityError:
            if must_create:
                raise CreateError
            raise
        except DatabaseError:
            if not must_create:
                raise UpdateError
            raise
Project: ims    Author: ims-team    | project source | file source
def save(self, must_create=False):
        """
        Saves the current session data to the database. If 'must_create' is
        True, a database error will be raised if the saving operation doesn't
        create a *new* entry (as opposed to possibly updating an existing
        entry).
        """
        if self.session_key is None:
            return self.create()
        data = self._get_session(no_load=must_create)
        obj = self.create_model_instance(data)
        using = router.db_for_write(self.model, instance=obj)
        try:
            with transaction.atomic(using=using):
                obj.save(force_insert=must_create, force_update=not must_create, using=using)
        except IntegrityError:
            if must_create:
                raise CreateError
            raise
        except DatabaseError:
            if not must_create:
                raise UpdateError
            raise
Project: lifesoundtrack    Author: MTG    | project source | file source
def save(self, must_create=False):
        """
        Saves the current session data to the database. If 'must_create' is
        True, a database error will be raised if the saving operation doesn't
        create a *new* entry (as opposed to possibly updating an existing
        entry).
        """
        if self.session_key is None:
            return self.create()
        data = self._get_session(no_load=must_create)
        obj = self.create_model_instance(data)
        using = router.db_for_write(self.model, instance=obj)
        try:
            with transaction.atomic(using=using):
                obj.save(force_insert=must_create, force_update=not must_create, using=using)
        except IntegrityError:
            if must_create:
                raise CreateError
            raise
        except DatabaseError:
            if not must_create:
                raise UpdateError
            raise
Project: django-open-lecture    Author: DmLitov4    | project source | file source
def save(self, must_create=False):
        """
        Saves the current session data to the database. If 'must_create' is
        True, a database error will be raised if the saving operation doesn't
        create a *new* entry (as opposed to possibly updating an existing
        entry).
        """
        if self.session_key is None:
            return self.create()
        data = self._get_session(no_load=must_create)
        obj = self.create_model_instance(data)
        using = router.db_for_write(self.model, instance=obj)
        try:
            with transaction.atomic(using=using):
                obj.save(force_insert=must_create, force_update=not must_create, using=using)
        except IntegrityError:
            if must_create:
                raise CreateError
            raise
        except DatabaseError:
            if not must_create:
                raise UpdateError
            raise
Project: travlr    Author: gauravkulkarni96    | project source | file source
def save(self, must_create=False):
        """
        Saves the current session data to the database. If 'must_create' is
        True, a database error will be raised if the saving operation doesn't
        create a *new* entry (as opposed to possibly updating an existing
        entry).
        """
        if self.session_key is None:
            return self.create()
        data = self._get_session(no_load=must_create)
        obj = self.create_model_instance(data)
        using = router.db_for_write(self.model, instance=obj)
        try:
            with transaction.atomic(using=using):
                obj.save(force_insert=must_create, force_update=not must_create, using=using)
        except IntegrityError:
            if must_create:
                raise CreateError
            raise
        except DatabaseError:
            if not must_create:
                raise UpdateError
            raise
Project: montage    Author: storyful    | project source | file source
def denormalise_project(project_id):
    """
        Denormalise data onto a project and save
    """
    project = _get_obj_silent_fail(
        Project.all_objects, pk=project_id
    )

    if not project:
        return

    project.video_tag_instance_count = (
        VideoTagInstance.objects
        .filter(video_tag__project_id=project_id)
        .count()
    )

    with inhibit_signals(Project):
        try:
            project.save(update_fields=["video_tag_instance_count"])
        except DatabaseError:
            pass
Project: c3nav    Author: c3nav    | project source | file source
def handle(self, *args, **options):
        from c3nav.mapdata.models import MapUpdate

        logger = logging.getLogger('c3nav')

        MapUpdate.objects.create(type='management')
        logger.info('New management update created.')

        if options['include_history']:
            logger.info('Deleting base history...')
            for filename in os.listdir(settings.CACHE_ROOT):
                if filename.startswith('history_base_'):
                    logger.info('Deleting %s...' % filename)
                    os.remove(os.path.join(settings.CACHE_ROOT, filename))
            logger.info('Base history deleted.')

        if not settings.HAS_CELERY:
            print(_('You don\'t have celery installed, so we will run processupdates now...'))
            try:
                process_map_updates()
            except DatabaseError:
                logger.error('Didn\'t work, there is already map update processing in progress.')

        if not settings.HAS_REAL_CACHE:
            print(_('You have no external cache configured, so don\'t forget to restart your c3nav instance!'))
Project: liberator    Author: libscie    | project source | file source
def save(self, must_create=False):
        """
        Saves the current session data to the database. If 'must_create' is
        True, a database error will be raised if the saving operation doesn't
        create a *new* entry (as opposed to possibly updating an existing
        entry).
        """
        if self.session_key is None:
            return self.create()
        data = self._get_session(no_load=must_create)
        obj = self.create_model_instance(data)
        using = router.db_for_write(self.model, instance=obj)
        try:
            with transaction.atomic(using=using):
                obj.save(force_insert=must_create, force_update=not must_create, using=using)
        except IntegrityError:
            if must_create:
                raise CreateError
            raise
        except DatabaseError:
            if not must_create:
                raise UpdateError
            raise
Project: nav    Author: UNINETT    | project source | file source
def get_prefix_info(addr):
    """Returns the smallest prefix from the NAVdb that an IP address fits into.

    :param addr: An IP address string.
    :returns: A Prefix object or None if no prefixes matched.

    """
    try:
        return Prefix.objects.select_related().extra(
            select={"mask_size": "masklen(netaddr)"},
            where=["%s << netaddr AND nettype <> 'scope'"],
            order_by=["-mask_size"],
            params=[addr]
        )[0]
    except (IndexError, DatabaseError):
        return None
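A hedged usage sketch of the helper above; the address literal is an assumption chosen for illustration.

prefix = get_prefix_info('10.0.1.42')   # returns the smallest covering Prefix, or None
if prefix is None:
    print('No prefix in NAVdb covers this address')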
Project: nav    Author: UNINETT    | project source | file source
def send(self, address, alert, language='en'):
        if address.account.has_perm('alert_by', 'sms'):
            message = self.get_message(alert, language, 'sms')

            if not address.DEBUG_MODE:
                try:
                    SMSQueue.objects.create(account=address.account,
                                            message=message,
                                            severity=alert.severity,
                                            phone=address.address)
                except (DatabaseError, IntegrityError) as err:
                    raise DispatcherException(
                        "Couldn't add sms to queue: %s" % err)
            else:
                _logger.debug('alert %d: In testing mode, would have added '
                              'message to sms queue for user %s at %s',
                              alert.id, address.account, address.address)
        else:
            _logger.warning('alert %d: %s does not have SMS privileges',
                            alert.id, address.account)
Project: django-oauth2-test    Author: ONSdigital    | project source | file source
def delete(self, request):
        """
        Take the user object from the request and call the 'delete' method on it if it exists in the DB.
        If this succeeds then we can report a success.

        :param request:
        :param args:
        :param kwargs:
        :return: Serialised JSON Response Object to indicate the resource has been created
        """

        stdlogger.debug("Hitting HTTP DELETE account view")
        user_email = request.user.email
        try:
            stdlogger.debug("Deleting this user object")
            request.user.delete()
        except (IntegrityError, InternalError, DataError, DatabaseError):
            # The chances of this happening are slim to none! And this line of code should never happen. So we really
            # need to tell the other system we are not capable of creating the resource.
            raise DatabaseFailureException

        context = {'account': user_email, 'deleted': 'success'}
        json_context = JSONRenderer().render(context)

        return Response(data=json_context, status=status.HTTP_201_CREATED,)
Project: CSCE482-WordcloudPlus    Author: ggaytan00    | project source | file source
def save(self, must_create=False):
        """
        Saves the current session data to the database. If 'must_create' is
        True, a database error will be raised if the saving operation doesn't
        create a *new* entry (as opposed to possibly updating an existing
        entry).
        """
        if self.session_key is None:
            return self.create()
        data = self._get_session(no_load=must_create)
        obj = self.create_model_instance(data)
        using = router.db_for_write(self.model, instance=obj)
        try:
            with transaction.atomic(using=using):
                obj.save(force_insert=must_create, force_update=not must_create, using=using)
        except IntegrityError:
            if must_create:
                raise CreateError
            raise
        except DatabaseError:
            if not must_create:
                raise UpdateError
            raise
Project: producthunt    Author: davidgengler    | project source | file source
def save(self, must_create=False):
        """
        Saves the current session data to the database. If 'must_create' is
        True, a database error will be raised if the saving operation doesn't
        create a *new* entry (as opposed to possibly updating an existing
        entry).
        """
        if self.session_key is None:
            return self.create()
        data = self._get_session(no_load=must_create)
        obj = self.create_model_instance(data)
        using = router.db_for_write(self.model, instance=obj)
        try:
            with transaction.atomic(using=using):
                obj.save(force_insert=must_create, force_update=not must_create, using=using)
        except IntegrityError:
            if must_create:
                raise CreateError
            raise
        except DatabaseError:
            if not must_create:
                raise UpdateError
            raise
Project: wger-lycan-clan    Author: andela    | project source | file source
def database_exists():
    """Detect if the database exists"""

    # can't be imported in global scope as they already require
    # the settings module during import
    from django.db import DatabaseError
    from django.core.exceptions import ImproperlyConfigured
    from wger.manager.models import User

    try:
        # TODO: Use another model, the User could be deactivated
        User.objects.count()
    except DatabaseError:
        return False
    except ImproperlyConfigured:
        print("Your settings file seems broken")
        sys.exit(0)
    else:
        return True
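A hedged sketch of how such a check is typically consumed at startup. The guard function below is a hypothetical addition built on database_exists(), not part of the wger code itself.

import sys

def require_database():
    """Hypothetical startup guard: abort early if the schema is missing or unreachable."""
    if not database_exists():
        print("Database not reachable or not migrated yet; aborting.")
        sys.exit(1)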
Project: django-rtc    Author: scifiswapnil    | project source | file source
def save(self, must_create=False):
        """
        Saves the current session data to the database. If 'must_create' is
        True, a database error will be raised if the saving operation doesn't
        create a *new* entry (as opposed to possibly updating an existing
        entry).
        """
        if self.session_key is None:
            return self.create()
        data = self._get_session(no_load=must_create)
        obj = self.create_model_instance(data)
        using = router.db_for_write(self.model, instance=obj)
        try:
            with transaction.atomic(using=using):
                obj.save(force_insert=must_create, force_update=not must_create, using=using)
        except IntegrityError:
            if must_create:
                raise CreateError
            raise
        except DatabaseError:
            if not must_create:
                raise UpdateError
            raise
Project: alterchef    Author: libremesh    | project source | file source
def process_jobs(cls, sync=False):
        try:
            started = FwJob.objects.filter(status="STARTED")
            waiting = FwJob.objects.filter(status="WAITING")
        except DatabaseError:
            return
        if not started and waiting:
            job = waiting[0]
            job.status = "STARTED"
            job.profile.write_to_disk()
            commands = cls.make_commands(job.profile.network.slug,
                                         job.profile.name,
                                         job.job_data["devices"],
                                         job.job_data["revision"])
            job.job_data["commands"] = commands
            job.save()
            job.process(sync)  # runs in another thread
Project: LatinSounds_AppEnviaMail    Author: G3ek-aR    | project source | file source
def save(self, must_create=False):
        """
        Saves the current session data to the database. If 'must_create' is
        True, a database error will be raised if the saving operation doesn't
        create a *new* entry (as opposed to possibly updating an existing
        entry).
        """
        if self.session_key is None:
            return self.create()
        data = self._get_session(no_load=must_create)
        obj = self.create_model_instance(data)
        using = router.db_for_write(self.model, instance=obj)
        try:
            with transaction.atomic(using=using):
                obj.save(force_insert=must_create, force_update=not must_create, using=using)
        except IntegrityError:
            if must_create:
                raise CreateError
            raise
        except DatabaseError:
            if not must_create:
                raise UpdateError
            raise
Project: apm-agent-python    Author: elastic    | project source | file source
def get_user_info(self, request):
        user_info = {}

        if not hasattr(request, 'user'):
            return user_info
        try:
            user = request.user
            if hasattr(user, 'is_authenticated'):
                if callable(user.is_authenticated):
                    user_info['is_authenticated'] = user.is_authenticated()
                else:
                    user_info['is_authenticated'] = bool(user.is_authenticated)
            if hasattr(user, 'id'):
                user_info['id'] = user.id
            if hasattr(user, 'get_username'):
                user_info['username'] = user.get_username()
            elif hasattr(user, 'username'):
                user_info['username'] = user.username

            if hasattr(user, 'email'):
                user_info['email'] = user.email
        except DatabaseError:
            # If the connection is closed or similar, we'll just skip this
            return {}

        return user_info
Project: DRF-tutorial    Author: TangentSolutions    | project source | file source
def test_determine_db_status(self, mock_query):
        """Health should not be ok if it cannot connect to the db"""

        mock_query.side_effect = DatabaseError()
        url = reverse('health-list')
        response = self.c.get(url)

        status = response.json().get("status", {})
        db_status = status.get('db')
        assert db_status == 'down', \
            'Expect DB to be down. Got: {}' . format (db_status)

        status = status.get('status')        
        assert status == 'down', \
            'Expect status to be down. Got: {}' . format (status)
Project: montage    Author: storyful    | project source | file source
def denormalise_video(video_id):
    """
        Denormalise data onto a video and save
    """
    video = _get_obj_silent_fail(
        Video.all_objects
        .only("pk", "recorded_date_overridden", "location_overridden")
        .with_watch_count_real()
        .with_tag_count_real()
        .with_tag_instance_count_real()
        .with_duplicate_count_real(),
        pk=video_id)

    if not video:
        return

    video.watch_count = video.watch_count_real
    video.tag_count = video.video_tag_count_real
    video.tag_instance_count = video.video_tag_instance_count_real
    video.duplicate_count = video.duplicate_count_real

    with inhibit_signals(Video):
        try:
            video.save(update_fields=[
                "watch_count",
                "tag_count",
                "tag_instance_count",
                "duplicate_count"])
        except DatabaseError:
            pass
Project: montage    Author: storyful    | project source | file source
def test_denormalize_video_exception(self, mock_save, mock_get_obj):
        project = milkman.deliver(Project)
        video = self.create_video(
            project=project,
            watch_count=5,
            tag_count=10,
            tag_instance_count=20,
            duplicate_count=3)
        self.assertEqual(video.watch_count, 5)
        self.assertEqual(video.tag_count, 10)
        self.assertEqual(video.tag_instance_count, 20)
        self.assertEqual(video.duplicate_count, 3)

        # set mock return value to that of the actual video (since this is
        # what the _get_obj_silent_fail method would return anyway). Then we
        # mock out and fake the 'real counts' that would have been returned by
        # the query so that it looks like the counts are out of date.
        video.watch_count_real = 10
        video.video_tag_count_real = 12
        video.video_tag_instance_count_real = 21
        video.duplicate_count_real = 4
        mock_get_obj.return_value = video

        # now mock the response of the save to be an exception - this might
        # happen if the video is removed between the video being retrieved
        # and updated with new counts
        mock_save.side_effect = DatabaseError()

        # run the denormalisation and check the exception is swallowed
        method_resp = denormalise_video(video.pk)
        self.assertEqual(None, method_resp)
Project: montage    Author: storyful    | project source | file source
def test_denormalize_project_exception(self, mock_qs_count, mock_save):
        project = milkman.deliver(Project, video_tag_instance_count=0)
        mock_qs_count.return_value = 5

        # now mock the response of the save to be an exception - this might
        # happen if the project is removed between the project being retrieved
        # and updated with new counts
        mock_save.side_effect = DatabaseError()

        # run the denormalisation and check the exception is swallowed
        method_resp = denormalise_project(project.pk)
        self.assertEqual(None, method_resp)
Project: c3nav    Author: c3nav    | project source | file source
def process_map_updates(self):
    if self.request.called_directly:
        logger.info('Processing map updates by direct command...')
    else:
        logger.info('Processing map updates...')

    from c3nav.mapdata.models import MapUpdate
    try:
        try:
            updates = MapUpdate.process_updates()
        except DatabaseError:
            if self.request.called_directly:
                raise
            logger.info('Processing is already running, retrying in 30 seconds.')
            raise self.retry(countdown=30)
    except MaxRetriesExceededError:
        logger.info('Cannot retry, retries exceeded. Exiting.')
        return

    if updates:
        print()

    logger.info(ungettext_lazy('%d map update processed.', '%d map updates processed.', len(updates)) % len(updates))

    if updates:
        logger.info(_('Last processed update: %(date)s (#%(id)d)') % {
            'date': date_format(updates[-1].datetime, 'DATETIME_FORMAT'),
            'id': updates[-1].pk,
        })
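For self.request and self.retry() to exist, the function above is presumably registered as a bound Celery task. The decorator shown here is a hypothetical sketch; c3nav's real task registration may use a different app object or options.

from celery import shared_task

@shared_task(bind=True)          # bind=True exposes the task instance as `self`
def process_map_updates(self):
    ...                          # body as shown above; self.retry(countdown=30) re-queues the task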
Project: c3nav    Author: c3nav    | project source | file source
def handle(self, *args, **options):
        logger = logging.getLogger('c3nav')

        try:
            process_map_updates()
        except DatabaseError:
            logger.error(_('There is already map update processing in progress.'))

        if not settings.HAS_REAL_CACHE:
            print(_('You have no external cache configured, so don\'t forget to restart your c3nav instance!'))
Project: nav    Author: UNINETT    | project source | file source
def retry_on_db_loss():
    """Returns a nav.db.retry_on_db_loss decorator with eventengine's default
    parameters.
    """
    return nav.db.retry_on_db_loss(count=3, delay=5, fallback=harakiri,
                                   also_handled=(DatabaseError,))
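A hedged sketch of applying the decorator returned above; `check_events` is a hypothetical function name, not taken from the nav project.

@retry_on_db_loss()
def check_events():
    ...   # a DatabaseError raised here is retried 3 times, 5 seconds apart, before the harakiri fallback runs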
Project: django-oauth2-test    Author: ONSdigital    | project source | file source
def post(self, request, *args, **kwargs):
        """
        Take the user object from the request and call the 'save' method on it to persist to the DB. If this succeeds
        then we can report a success.

        :param request:
        :param args:
        :param kwargs:
        :return: Serialised JSON Response Object to indicate the resource has been created
        """
        stdlogger.debug("Hitting HTTP POST account view")

        # Try and persist the user to the DB. Remember this could fail a data integrity check if some other system has
        # saved this user before we run this line of code!
        try:
            request.user.save()

        except (IntegrityError, InternalError, DataError, DatabaseError):
            # The chances of this happening are slim to none! And this line of code should never happen. So we really
            # need to tell the other system we are not capable of creating the resource.
            raise DatabaseFailureException

        context = {'account': request.user.email, 'created': 'success'}
        json_context = JSONRenderer().render(context)

        return Response(data=json_context, status=status.HTTP_201_CREATED,)
Project: django-oauth2-test    Author: ONSdigital    | project source | file source
def put(self, request):
        """
        Take the user object from the request and updates user info if it exists in the DB. If the email (which is
        unique) is to be updated this will be in a new attribute within the PUT message called new_username.
        If this succeeds then we can report a success.

        :param request:
        :param args:
        :param kwargs:
        :return: Serialised JSON Response Object to indicate the resource has been created
        """

        stdlogger.debug("Hitting HTTP PUT account view")

        # Try and persist the user to the DB. Remember this could fail a data integrity check if some other system has
        # saved this user before we run this line of code!
        try:
            # Check to see if this PUT is changing the user ID. If it is, then keep the same user object with Primary
            # Key and change the email to the new one.
            if request.new_username:
                stdlogger.info("Admin is updating a user ID to a new value")
                request.user.email = request.new_username
            request.user.save()

        except (IntegrityError, InternalError, DataError, DatabaseError):
            # The chances of this happening are slim to none! And this line of code should never happen. So we really
            # need to tell the other system we are not capable of creating the resource.
            raise DatabaseFailureException

        context = {'account': request.user.email, 'updated': 'success'}
        json_context = JSONRenderer().render(context)

        return Response(data=json_context, status=status.HTTP_201_CREATED,)
Project: django-boardinghouse    Author: schinckel    | project source | file source
def test_loading_aware_data_without_a_schema_fails(self):
        with self.assertRaises(DatabaseError):
            with capture_err(call_command, 'loaddata', 'tests/fixtures/aware.json', commit=False) as output:
                self.assertIn('DatabaseError: Could not load tests.AwareModel(pk=None): relation "tests_awaremodel" does not exist\n', output)
Project: django-boardinghouse    Author: schinckel    | project source | file source
def test_loading_aware_data_with_template_schema_fails(self):
        with self.assertRaises(TemplateSchemaActivation):
            with capture_err(call_command, 'loaddata', 'tests/fixtures/aware.json', schema="__template__", commit=False) as output:
                self.assertIn('DatabaseError: Could not load tests.AwareModel(pk=None): relation "tests_awaremodel" does not exist\n', output)
Project: hacku-devops-2017    Author: hackoregon    | project source | file source
def test_determine_db_status(self, mock_query):
        """Health should not be ok if it cannot connect to the db"""

        mock_query.side_effect = DatabaseError()
        url = reverse('health-list')
        response = self.c.get(url)

        status = response.json().get("status", {})
        db_status = status.get('db')
        assert db_status == 'down', \
            'Expect DB to be down. Got: {}' . format (db_status)

        status = status.get('status')
        assert status == 'down', \
            'Expect status to be down. Got: {}' . format (status)
Project: hacku-devops-2017    Author: hackoregon    | project source | file source
def test_determine_db_status(self, mock_query):
        """Health should not be ok if it cannot connect to the db"""

        mock_query.side_effect = DatabaseError()
        url = reverse('health-list')
        response = self.c.get(url)

        status = response.json().get("status", {})
        db_status = status.get('db')
        assert db_status == 'down', \
            'Expect DB to be down. Got: {}' . format (db_status)

        status = status.get('status')
        assert status == 'down', \
            'Expect status to be down. Got: {}' . format (status)
Project: maas    Author: maas    | project source | file source
def _updateLastPing(self, node, message):
        """
        Update the last ping in any status which uses a script_set whenever a
        node in that status contacts us.
        """
        script_set_statuses = {
            NODE_STATUS.COMMISSIONING: 'current_commissioning_script_set_id',
            NODE_STATUS.TESTING: 'current_testing_script_set_id',
            NODE_STATUS.DEPLOYING: 'current_installation_script_set_id',
        }
        script_set_property = script_set_statuses.get(node.status)
        if script_set_property is not None:
            script_set_id = getattr(node, script_set_property)
            if script_set_id is not None:
                try:
                    script_set = ScriptSet.objects.select_for_update(
                        nowait=True).get(id=script_set_id)
                except ScriptSet.DoesNotExist:
                    # Weird that it would be deleted, but let's not cause a
                    # stack trace for this error.
                    pass
                except DatabaseError:
                    # select_for_update(nowait=True) failed instantly. Raise
                    # error so @transactional will retry the whole operation.
                    raise make_serialization_failure()
                else:
                    current_time = now()
                    if (script_set.last_ping is None or
                            current_time > script_set.last_ping):
                        script_set.last_ping = current_time
                        script_set.save(update_fields=['last_ping'])
Project: CodingDojo    Author: ComputerSocietyUNB    | project source | file source
def _base_set(self, mode, key, value, timeout=DEFAULT_TIMEOUT):
        timeout = self.get_backend_timeout(timeout)
        db = router.db_for_write(self.cache_model_class)
        connection = connections[db]
        table = connection.ops.quote_name(self._table)

        with connection.cursor() as cursor:
            cursor.execute("SELECT COUNT(*) FROM %s" % table)
            num = cursor.fetchone()[0]
            now = timezone.now()
            now = now.replace(microsecond=0)
            if timeout is None:
                exp = datetime.max
            elif settings.USE_TZ:
                exp = datetime.utcfromtimestamp(timeout)
            else:
                exp = datetime.fromtimestamp(timeout)
            exp = exp.replace(microsecond=0)
            if num > self._max_entries:
                self._cull(db, cursor, now)
            pickled = pickle.dumps(value, pickle.HIGHEST_PROTOCOL)
            b64encoded = base64.b64encode(pickled)
            # The DB column is expecting a string, so make sure the value is a
            # string, not bytes. Refs #19274.
            if six.PY3:
                b64encoded = b64encoded.decode('latin1')
            try:
                # Note: typecasting for datetimes is needed by some 3rd party
                # database backends. All core backends work without typecasting,
                # so be careful about changes here - test suite will NOT pick
                # regressions.
                with transaction.atomic(using=db):
                    cursor.execute("SELECT cache_key, expires FROM %s "
                                   "WHERE cache_key = %%s" % table, [key])
                    result = cursor.fetchone()

                    if result:
                        current_expires = result[1]
                        expression = models.Expression(output_field=models.DateTimeField())
                        for converter in (connection.ops.get_db_converters(expression) +
                                          expression.get_db_converters(connection)):
                            current_expires = converter(current_expires, expression, connection, {})

                    exp = connection.ops.adapt_datetimefield_value(exp)
                    if result and (mode == 'set' or (mode == 'add' and current_expires < now)):
                        cursor.execute("UPDATE %s SET value = %%s, expires = %%s "
                                       "WHERE cache_key = %%s" % table,
                                       [b64encoded, exp, key])
                    else:
                        cursor.execute("INSERT INTO %s (cache_key, value, expires) "
                                       "VALUES (%%s, %%s, %%s)" % table,
                                       [key, b64encoded, exp])
            except DatabaseError:
                # To be threadsafe, updates/inserts are allowed to fail silently
                return False
            else:
                return True
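A hedged note on how the boolean above surfaces: in the Django versions these cache-backend snippets come from, add() returns the value while set() discards it, so a silently swallowed DatabaseError simply makes add() report failure. The cache alias, key, and value below are assumptions for illustration.

from django.core.cache import caches

cache = caches['default']                      # assumes a DatabaseCache backend is configured
if not cache.add('motd', 'hello', timeout=60):
    # Either the key already existed or the underlying INSERT failed silently.
    pass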
Project: NarshaTech    Author: KimJangHyeon    | project source | file source
def _base_set(self, mode, key, value, timeout=DEFAULT_TIMEOUT):
        timeout = self.get_backend_timeout(timeout)
        db = router.db_for_write(self.cache_model_class)
        connection = connections[db]
        table = connection.ops.quote_name(self._table)

        with connection.cursor() as cursor:
            cursor.execute("SELECT COUNT(*) FROM %s" % table)
            num = cursor.fetchone()[0]
            now = timezone.now()
            now = now.replace(microsecond=0)
            if timeout is None:
                exp = datetime.max
            elif settings.USE_TZ:
                exp = datetime.utcfromtimestamp(timeout)
            else:
                exp = datetime.fromtimestamp(timeout)
            exp = exp.replace(microsecond=0)
            if num > self._max_entries:
                self._cull(db, cursor, now)
            pickled = pickle.dumps(value, pickle.HIGHEST_PROTOCOL)
            b64encoded = base64.b64encode(pickled)
            # The DB column is expecting a string, so make sure the value is a
            # string, not bytes. Refs #19274.
            if six.PY3:
                b64encoded = b64encoded.decode('latin1')
            try:
                # Note: typecasting for datetimes is needed by some 3rd party
                # database backends. All core backends work without typecasting,
                # so be careful about changes here - test suite will NOT pick
                # regressions.
                with transaction.atomic(using=db):
                    cursor.execute("SELECT cache_key, expires FROM %s "
                                   "WHERE cache_key = %%s" % table, [key])
                    result = cursor.fetchone()

                    if result:
                        current_expires = result[1]
                        expression = models.Expression(output_field=models.DateTimeField())
                        for converter in (connection.ops.get_db_converters(expression) +
                                          expression.get_db_converters(connection)):
                            current_expires = converter(current_expires, expression, connection, {})

                    exp = connection.ops.adapt_datetimefield_value(exp)
                    if result and (mode == 'set' or (mode == 'add' and current_expires < now)):
                        cursor.execute("UPDATE %s SET value = %%s, expires = %%s "
                                       "WHERE cache_key = %%s" % table,
                                       [b64encoded, exp, key])
                    else:
                        cursor.execute("INSERT INTO %s (cache_key, value, expires) "
                                       "VALUES (%%s, %%s, %%s)" % table,
                                       [key, b64encoded, exp])
            except DatabaseError:
                # To be threadsafe, updates/inserts are allowed to fail silently
                return False
            else:
                return True
Project: Scrum    Author: prakharchoudhary    | project source | file source
def _base_set(self, mode, key, value, timeout=DEFAULT_TIMEOUT):
        timeout = self.get_backend_timeout(timeout)
        db = router.db_for_write(self.cache_model_class)
        connection = connections[db]
        table = connection.ops.quote_name(self._table)

        with connection.cursor() as cursor:
            cursor.execute("SELECT COUNT(*) FROM %s" % table)
            num = cursor.fetchone()[0]
            now = timezone.now()
            now = now.replace(microsecond=0)
            if timeout is None:
                exp = datetime.max
            elif settings.USE_TZ:
                exp = datetime.utcfromtimestamp(timeout)
            else:
                exp = datetime.fromtimestamp(timeout)
            exp = exp.replace(microsecond=0)
            if num > self._max_entries:
                self._cull(db, cursor, now)
            pickled = pickle.dumps(value, pickle.HIGHEST_PROTOCOL)
            b64encoded = base64.b64encode(pickled)
            # The DB column is expecting a string, so make sure the value is a
            # string, not bytes. Refs #19274.
            if six.PY3:
                b64encoded = b64encoded.decode('latin1')
            try:
                # Note: typecasting for datetimes is needed by some 3rd party
                # database backends. All core backends work without typecasting,
                # so be careful about changes here - test suite will NOT pick
                # regressions.
                with transaction.atomic(using=db):
                    cursor.execute("SELECT cache_key, expires FROM %s "
                                   "WHERE cache_key = %%s" % table, [key])
                    result = cursor.fetchone()

                    if result:
                        current_expires = result[1]
                        expression = models.Expression(output_field=models.DateTimeField())
                        for converter in (connection.ops.get_db_converters(expression) +
                                          expression.get_db_converters(connection)):
                            current_expires = converter(current_expires, expression, connection, {})

                    exp = connection.ops.adapt_datetimefield_value(exp)
                    if result and (mode == 'set' or (mode == 'add' and current_expires < now)):
                        cursor.execute("UPDATE %s SET value = %%s, expires = %%s "
                                       "WHERE cache_key = %%s" % table,
                                       [b64encoded, exp, key])
                    else:
                        cursor.execute("INSERT INTO %s (cache_key, value, expires) "
                                       "VALUES (%%s, %%s, %%s)" % table,
                                       [key, b64encoded, exp])
            except DatabaseError:
                # To be threadsafe, updates/inserts are allowed to fail silently
                return False
            else:
                return True
Project: django-livesettings3    Author: kunaldeo    | project source | file source
def _value(self):
        global is_setting_initializing
        use_db, overrides = get_overrides()

        if not use_db:
            try:
                val = overrides[self.group.key][self.key]
            except KeyError:
                if self.use_default:
                    val = self.default
                else:
                    raise SettingNotSet('%s.%s is not in your LIVESETTINGS_OPTIONS' % (self.group.key, self.key))

        else:
            try:
                val = self.setting.value

            except SettingNotSet as sns:
                is_setting_initializing = False
                if self.use_default:
                    val = self.default
                    if overrides:
                        # maybe override the default
                        grp = overrides.get(self.group.key, {})
                        if self.key in grp:
                            val = grp[self.key]
                else:
                    val = NOTSET

            except AttributeError as ae:
                is_setting_initializing = False
                log.error("Attribute error: %s", ae)
                log.error("%s: Could not get _value of %s", self.key, self.setting)
                raise (ae)

            except Exception as e:
                global _WARN
                if is_setting_initializing and isinstance(e, DatabaseError) and str(e).find(
                        "livesettings_setting") > -1:
                    if 'livesettings_setting' not in _WARN:
                        log.warn(str(e).strip())
                        _WARN['livesettings_setting'] = True
                    log.warn('Error loading livesettings from table, OK if you are in syncdb or before it. ROLLBACK')
                    connection._rollback()

                    if self.use_default:
                        val = self.default
                    else:
                        raise ImproperlyConfigured("All settings used in startup must have defaults, %s.%s does not",
                                                   self.group.key, self.key)
                else:
                    is_setting_initializing = False
                    import traceback
                    traceback.print_exc()
                    log.error("Problem finding settings %s.%s, %s", self.group.key, self.key, e)
                    raise SettingNotSet("Startup error, couldn't load %s.%s" % (self.group.key, self.key))
            else:
                is_setting_initializing = False
        return val
Project: django    Author: alexsukhrin    | project source | file source
def _base_set(self, mode, key, value, timeout=DEFAULT_TIMEOUT):
        timeout = self.get_backend_timeout(timeout)
        db = router.db_for_write(self.cache_model_class)
        connection = connections[db]
        table = connection.ops.quote_name(self._table)

        with connection.cursor() as cursor:
            cursor.execute("SELECT COUNT(*) FROM %s" % table)
            num = cursor.fetchone()[0]
            now = timezone.now()
            now = now.replace(microsecond=0)
            if timeout is None:
                exp = datetime.max
            elif settings.USE_TZ:
                exp = datetime.utcfromtimestamp(timeout)
            else:
                exp = datetime.fromtimestamp(timeout)
            exp = exp.replace(microsecond=0)
            if num > self._max_entries:
                self._cull(db, cursor, now)
            pickled = pickle.dumps(value, pickle.HIGHEST_PROTOCOL)
            b64encoded = base64.b64encode(pickled)
            # The DB column is expecting a string, so make sure the value is a
            # string, not bytes. Refs #19274.
            if six.PY3:
                b64encoded = b64encoded.decode('latin1')
            try:
                # Note: typecasting for datetimes is needed by some 3rd party
                # database backends. All core backends work without typecasting,
                # so be careful about changes here - test suite will NOT pick
                # regressions.
                with transaction.atomic(using=db):
                    cursor.execute("SELECT cache_key, expires FROM %s "
                                   "WHERE cache_key = %%s" % table, [key])
                    result = cursor.fetchone()

                    if result:
                        current_expires = result[1]
                        expression = models.Expression(output_field=models.DateTimeField())
                        for converter in (connection.ops.get_db_converters(expression) +
                                          expression.get_db_converters(connection)):
                            current_expires = converter(current_expires, expression, connection, {})

                    exp = connection.ops.adapt_datetimefield_value(exp)
                    if result and (mode == 'set' or (mode == 'add' and current_expires < now)):
                        cursor.execute("UPDATE %s SET value = %%s, expires = %%s "
                                       "WHERE cache_key = %%s" % table,
                                       [b64encoded, exp, key])
                    else:
                        cursor.execute("INSERT INTO %s (cache_key, value, expires) "
                                       "VALUES (%%s, %%s, %%s)" % table,
                                       [key, b64encoded, exp])
            except DatabaseError:
                # To be threadsafe, updates/inserts are allowed to fail silently
                return False
            else:
                return True
Project: Gypsy    Author: benticarlos    | project source | file source
def _base_set(self, mode, key, value, timeout=DEFAULT_TIMEOUT):
        timeout = self.get_backend_timeout(timeout)
        db = router.db_for_write(self.cache_model_class)
        connection = connections[db]
        table = connection.ops.quote_name(self._table)

        with connection.cursor() as cursor:
            cursor.execute("SELECT COUNT(*) FROM %s" % table)
            num = cursor.fetchone()[0]
            now = timezone.now()
            now = now.replace(microsecond=0)
            if timeout is None:
                exp = datetime.max
            elif settings.USE_TZ:
                exp = datetime.utcfromtimestamp(timeout)
            else:
                exp = datetime.fromtimestamp(timeout)
            exp = exp.replace(microsecond=0)
            if num > self._max_entries:
                self._cull(db, cursor, now)
            pickled = pickle.dumps(value, pickle.HIGHEST_PROTOCOL)
            b64encoded = base64.b64encode(pickled)
            # The DB column is expecting a string, so make sure the value is a
            # string, not bytes. Refs #19274.
            if six.PY3:
                b64encoded = b64encoded.decode('latin1')
            try:
                # Note: typecasting for datetimes is needed by some 3rd party
                # database backends. All core backends work without typecasting,
                # so be careful about changes here - test suite will NOT pick
                # regressions.
                with transaction.atomic(using=db):
                    cursor.execute("SELECT cache_key, expires FROM %s "
                                   "WHERE cache_key = %%s" % table, [key])
                    result = cursor.fetchone()

                    if result:
                        current_expires = result[1]
                        expression = models.Expression(output_field=models.DateTimeField())
                        for converter in (connection.ops.get_db_converters(expression) +
                                          expression.get_db_converters(connection)):
                            current_expires = converter(current_expires, expression, connection, {})

                    exp = connection.ops.adapt_datetimefield_value(exp)
                    if result and (mode == 'set' or (mode == 'add' and current_expires < now)):
                        cursor.execute("UPDATE %s SET value = %%s, expires = %%s "
                                       "WHERE cache_key = %%s" % table,
                                       [b64encoded, exp, key])
                    else:
                        cursor.execute("INSERT INTO %s (cache_key, value, expires) "
                                       "VALUES (%%s, %%s, %%s)" % table,
                                       [key, b64encoded, exp])
            except DatabaseError:
                # To be threadsafe, updates/inserts are allowed to fail silently
                return False
            else:
                return True
Project: DjangoBlog    Author: 0daybug    | project source | file source
def _base_set(self, mode, key, value, timeout=DEFAULT_TIMEOUT):
        timeout = self.get_backend_timeout(timeout)
        db = router.db_for_write(self.cache_model_class)
        table = connections[db].ops.quote_name(self._table)

        with connections[db].cursor() as cursor:
            cursor.execute("SELECT COUNT(*) FROM %s" % table)
            num = cursor.fetchone()[0]
            now = timezone.now()
            now = now.replace(microsecond=0)
            if timeout is None:
                exp = datetime.max
            elif settings.USE_TZ:
                exp = datetime.utcfromtimestamp(timeout)
            else:
                exp = datetime.fromtimestamp(timeout)
            exp = exp.replace(microsecond=0)
            if num > self._max_entries:
                self._cull(db, cursor, now)
            pickled = pickle.dumps(value, pickle.HIGHEST_PROTOCOL)
            b64encoded = base64.b64encode(pickled)
            # The DB column is expecting a string, so make sure the value is a
            # string, not bytes. Refs #19274.
            if six.PY3:
                b64encoded = b64encoded.decode('latin1')
            try:
                # Note: typecasting for datetimes is needed by some 3rd party
                # database backends. All core backends work without typecasting,
                # so be careful about changes here - test suite will NOT pick
                # regressions.
                with transaction.atomic(using=db):
                    cursor.execute("SELECT cache_key, expires FROM %s "
                                   "WHERE cache_key = %%s" % table, [key])
                    result = cursor.fetchone()
                    if result:
                        current_expires = result[1]
                        if (connections[db].features.needs_datetime_string_cast and not
                                isinstance(current_expires, datetime)):
                            current_expires = typecast_timestamp(str(current_expires))
                    exp = connections[db].ops.value_to_db_datetime(exp)
                    if result and (mode == 'set' or (mode == 'add' and current_expires < now)):
                        cursor.execute("UPDATE %s SET value = %%s, expires = %%s "
                                       "WHERE cache_key = %%s" % table,
                                       [b64encoded, exp, key])
                    else:
                        cursor.execute("INSERT INTO %s (cache_key, value, expires) "
                                       "VALUES (%%s, %%s, %%s)" % table,
                                       [key, b64encoded, exp])
            except DatabaseError:
                # To be threadsafe, updates/inserts are allowed to fail silently
                return False
            else:
                return True
Project: wanblog    Author: wanzifa    | project source | file source
def _base_set(self, mode, key, value, timeout=DEFAULT_TIMEOUT):
        timeout = self.get_backend_timeout(timeout)
        db = router.db_for_write(self.cache_model_class)
        connection = connections[db]
        table = connection.ops.quote_name(self._table)

        with connection.cursor() as cursor:
            cursor.execute("SELECT COUNT(*) FROM %s" % table)
            num = cursor.fetchone()[0]
            now = timezone.now()
            now = now.replace(microsecond=0)
            if timeout is None:
                exp = datetime.max
            elif settings.USE_TZ:
                exp = datetime.utcfromtimestamp(timeout)
            else:
                exp = datetime.fromtimestamp(timeout)
            exp = exp.replace(microsecond=0)
            if num > self._max_entries:
                self._cull(db, cursor, now)
            pickled = pickle.dumps(value, pickle.HIGHEST_PROTOCOL)
            b64encoded = base64.b64encode(pickled)
            # The DB column is expecting a string, so make sure the value is a
            # string, not bytes. Refs #19274.
            if six.PY3:
                b64encoded = b64encoded.decode('latin1')
            try:
                # Note: typecasting for datetimes is needed by some 3rd party
                # database backends. All core backends work without typecasting,
                # so be careful about changes here - test suite will NOT pick
                # regressions.
                with transaction.atomic(using=db):
                    cursor.execute("SELECT cache_key, expires FROM %s "
                                   "WHERE cache_key = %%s" % table, [key])
                    result = cursor.fetchone()

                    if result:
                        current_expires = result[1]
                        expression = models.Expression(output_field=models.DateTimeField())
                        for converter in (connection.ops.get_db_converters(expression) +
                                          expression.get_db_converters(connection)):
                            current_expires = converter(current_expires, expression, connection, {})

                    exp = connection.ops.adapt_datetimefield_value(exp)
                    if result and (mode == 'set' or (mode == 'add' and current_expires < now)):
                        cursor.execute("UPDATE %s SET value = %%s, expires = %%s "
                                       "WHERE cache_key = %%s" % table,
                                       [b64encoded, exp, key])
                    else:
                        cursor.execute("INSERT INTO %s (cache_key, value, expires) "
                                       "VALUES (%%s, %%s, %%s)" % table,
                                       [key, b64encoded, exp])
            except DatabaseError:
                # To be threadsafe, updates/inserts are allowed to fail silently
                return False
            else:
                return True
Project: tabmaster    Author: NicolasMinghetti    | project source | file source
def _base_set(self, mode, key, value, timeout=DEFAULT_TIMEOUT):
        timeout = self.get_backend_timeout(timeout)
        db = router.db_for_write(self.cache_model_class)
        connection = connections[db]
        table = connection.ops.quote_name(self._table)

        with connection.cursor() as cursor:
            cursor.execute("SELECT COUNT(*) FROM %s" % table)
            num = cursor.fetchone()[0]
            now = timezone.now()
            now = now.replace(microsecond=0)
            if timeout is None:
                exp = datetime.max
            elif settings.USE_TZ:
                exp = datetime.utcfromtimestamp(timeout)
            else:
                exp = datetime.fromtimestamp(timeout)
            exp = exp.replace(microsecond=0)
            if num > self._max_entries:
                self._cull(db, cursor, now)
            pickled = pickle.dumps(value, pickle.HIGHEST_PROTOCOL)
            b64encoded = base64.b64encode(pickled)
            # The DB column is expecting a string, so make sure the value is a
            # string, not bytes. Refs #19274.
            if six.PY3:
                b64encoded = b64encoded.decode('latin1')
            try:
                # Note: typecasting for datetimes is needed by some 3rd party
                # database backends. All core backends work without typecasting,
                # so be careful about changes here - test suite will NOT pick
                # regressions.
                with transaction.atomic(using=db):
                    cursor.execute("SELECT cache_key, expires FROM %s "
                                   "WHERE cache_key = %%s" % table, [key])
                    result = cursor.fetchone()

                    if result:
                        current_expires = result[1]
                        expression = models.Expression(output_field=models.DateTimeField())
                        for converter in (connection.ops.get_db_converters(expression) +
                                          expression.get_db_converters(connection)):
                            current_expires = converter(current_expires, expression, connection, {})

                    exp = connection.ops.adapt_datetimefield_value(exp)
                    if result and (mode == 'set' or (mode == 'add' and current_expires < now)):
                        cursor.execute("UPDATE %s SET value = %%s, expires = %%s "
                                       "WHERE cache_key = %%s" % table,
                                       [b64encoded, exp, key])
                    else:
                        cursor.execute("INSERT INTO %s (cache_key, value, expires) "
                                       "VALUES (%%s, %%s, %%s)" % table,
                                       [key, b64encoded, exp])
            except DatabaseError:
                # To be threadsafe, updates/inserts are allowed to fail silently
                return False
            else:
                return True
Project: trydjango18    Author: lucifer-yqh    | project source | file source
def _base_set(self, mode, key, value, timeout=DEFAULT_TIMEOUT):
        timeout = self.get_backend_timeout(timeout)
        db = router.db_for_write(self.cache_model_class)
        table = connections[db].ops.quote_name(self._table)

        with connections[db].cursor() as cursor:
            cursor.execute("SELECT COUNT(*) FROM %s" % table)
            num = cursor.fetchone()[0]
            now = timezone.now()
            now = now.replace(microsecond=0)
            if timeout is None:
                exp = datetime.max
            elif settings.USE_TZ:
                exp = datetime.utcfromtimestamp(timeout)
            else:
                exp = datetime.fromtimestamp(timeout)
            exp = exp.replace(microsecond=0)
            if num > self._max_entries:
                self._cull(db, cursor, now)
            pickled = pickle.dumps(value, pickle.HIGHEST_PROTOCOL)
            b64encoded = base64.b64encode(pickled)
            # The DB column is expecting a string, so make sure the value is a
            # string, not bytes. Refs #19274.
            if six.PY3:
                b64encoded = b64encoded.decode('latin1')
            try:
                # Note: typecasting for datetimes is needed by some 3rd party
                # database backends. All core backends work without typecasting,
                # so be careful about changes here - test suite will NOT pick
                # regressions.
                with transaction.atomic(using=db):
                    cursor.execute("SELECT cache_key, expires FROM %s "
                                   "WHERE cache_key = %%s" % table, [key])
                    result = cursor.fetchone()
                    if result:
                        current_expires = result[1]
                        if (connections[db].features.needs_datetime_string_cast and not
                                isinstance(current_expires, datetime)):
                            current_expires = typecast_timestamp(str(current_expires))
                    exp = connections[db].ops.value_to_db_datetime(exp)
                    if result and (mode == 'set' or (mode == 'add' and current_expires < now)):
                        cursor.execute("UPDATE %s SET value = %%s, expires = %%s "
                                       "WHERE cache_key = %%s" % table,
                                       [b64encoded, exp, key])
                    else:
                        cursor.execute("INSERT INTO %s (cache_key, value, expires) "
                                       "VALUES (%%s, %%s, %%s)" % table,
                                       [key, b64encoded, exp])
            except DatabaseError:
                # To be threadsafe, updates/inserts are allowed to fail silently
                return False
            else:
                return True
Project: trydjango18    Author: wei0104    | project source | file source
def _base_set(self, mode, key, value, timeout=DEFAULT_TIMEOUT):
        timeout = self.get_backend_timeout(timeout)
        db = router.db_for_write(self.cache_model_class)
        table = connections[db].ops.quote_name(self._table)

        with connections[db].cursor() as cursor:
            cursor.execute("SELECT COUNT(*) FROM %s" % table)
            num = cursor.fetchone()[0]
            now = timezone.now()
            now = now.replace(microsecond=0)
            if timeout is None:
                exp = datetime.max
            elif settings.USE_TZ:
                exp = datetime.utcfromtimestamp(timeout)
            else:
                exp = datetime.fromtimestamp(timeout)
            exp = exp.replace(microsecond=0)
            if num > self._max_entries:
                self._cull(db, cursor, now)
            pickled = pickle.dumps(value, pickle.HIGHEST_PROTOCOL)
            b64encoded = base64.b64encode(pickled)
            # The DB column is expecting a string, so make sure the value is a
            # string, not bytes. Refs #19274.
            if six.PY3:
                b64encoded = b64encoded.decode('latin1')
            try:
                # Note: typecasting for datetimes is needed by some 3rd party
                # database backends. All core backends work without typecasting,
                # so be careful about changes here - test suite will NOT pick
                # regressions.
                with transaction.atomic(using=db):
                    cursor.execute("SELECT cache_key, expires FROM %s "
                                   "WHERE cache_key = %%s" % table, [key])
                    result = cursor.fetchone()
                    if result:
                        current_expires = result[1]
                        if (connections[db].features.needs_datetime_string_cast and not
                                isinstance(current_expires, datetime)):
                            current_expires = typecast_timestamp(str(current_expires))
                    exp = connections[db].ops.value_to_db_datetime(exp)
                    if result and (mode == 'set' or (mode == 'add' and current_expires < now)):
                        cursor.execute("UPDATE %s SET value = %%s, expires = %%s "
                                       "WHERE cache_key = %%s" % table,
                                       [b64encoded, exp, key])
                    else:
                        cursor.execute("INSERT INTO %s (cache_key, value, expires) "
                                       "VALUES (%%s, %%s, %%s)" % table,
                                       [key, b64encoded, exp])
            except DatabaseError:
                # To be threadsafe, updates/inserts are allowed to fail silently
                return False
            else:
                return True
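
The two trydjango18 listings above come from an older (Django 1.8-era) revision of the same method: instead of the Expression/get_db_converters loop, they rely on `needs_datetime_string_cast` plus `typecast_timestamp` to turn a string expiry back into a datetime, and on `value_to_db_datetime` (later renamed `adapt_datetimefield_value`) to adapt the outgoing value. A small sketch of that typecast step, assuming the Django 1.8-era import path:

# Hedged illustration of the needs_datetime_string_cast branch above: some
# backends return the expires column as a string, which is parsed into a
# datetime before the `current_expires < now` comparison.
from django.db.backends.utils import typecast_timestamp  # Django 1.8-era location

current_expires = typecast_timestamp('2016-01-01 12:30:45')
# -> datetime.datetime(2016, 1, 1, 12, 30, 45)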
Project: ims    Author: ims-team    | project source | file source
def _base_set(self, mode, key, value, timeout=DEFAULT_TIMEOUT):
        timeout = self.get_backend_timeout(timeout)
        db = router.db_for_write(self.cache_model_class)
        connection = connections[db]
        table = connection.ops.quote_name(self._table)

        with connection.cursor() as cursor:
            cursor.execute("SELECT COUNT(*) FROM %s" % table)
            num = cursor.fetchone()[0]
            now = timezone.now()
            now = now.replace(microsecond=0)
            if timeout is None:
                exp = datetime.max
            elif settings.USE_TZ:
                exp = datetime.utcfromtimestamp(timeout)
            else:
                exp = datetime.fromtimestamp(timeout)
            exp = exp.replace(microsecond=0)
            if num > self._max_entries:
                self._cull(db, cursor, now)
            pickled = pickle.dumps(value, pickle.HIGHEST_PROTOCOL)
            b64encoded = base64.b64encode(pickled)
            # The DB column is expecting a string, so make sure the value is a
            # string, not bytes. Refs #19274.
            if six.PY3:
                b64encoded = b64encoded.decode('latin1')
            try:
                # Note: typecasting for datetimes is needed by some 3rd party
                # database backends. All core backends work without typecasting,
                # so be careful about changes here - test suite will NOT pick
                # regressions.
                with transaction.atomic(using=db):
                    cursor.execute("SELECT cache_key, expires FROM %s "
                                   "WHERE cache_key = %%s" % table, [key])
                    result = cursor.fetchone()

                    if result:
                        current_expires = result[1]
                        expression = models.Expression(output_field=models.DateTimeField())
                        for converter in (connection.ops.get_db_converters(expression) +
                                          expression.get_db_converters(connection)):
                            current_expires = converter(current_expires, expression, connection, {})

                    exp = connection.ops.adapt_datetimefield_value(exp)
                    if result and (mode == 'set' or (mode == 'add' and current_expires < now)):
                        cursor.execute("UPDATE %s SET value = %%s, expires = %%s "
                                       "WHERE cache_key = %%s" % table,
                                       [b64encoded, exp, key])
                    else:
                        cursor.execute("INSERT INTO %s (cache_key, value, expires) "
                                       "VALUES (%%s, %%s, %%s)" % table,
                                       [key, b64encoded, exp])
            except DatabaseError:
                # To be threadsafe, updates/inserts are allowed to fail silently
                return False
            else:
                return True
Project: lifesoundtrack    Author: MTG    | project source | file source
def _base_set(self, mode, key, value, timeout=DEFAULT_TIMEOUT):
        timeout = self.get_backend_timeout(timeout)
        db = router.db_for_write(self.cache_model_class)
        connection = connections[db]
        table = connection.ops.quote_name(self._table)

        with connection.cursor() as cursor:
            cursor.execute("SELECT COUNT(*) FROM %s" % table)
            num = cursor.fetchone()[0]
            now = timezone.now()
            now = now.replace(microsecond=0)
            if timeout is None:
                exp = datetime.max
            elif settings.USE_TZ:
                exp = datetime.utcfromtimestamp(timeout)
            else:
                exp = datetime.fromtimestamp(timeout)
            exp = exp.replace(microsecond=0)
            if num > self._max_entries:
                self._cull(db, cursor, now)
            pickled = pickle.dumps(value, pickle.HIGHEST_PROTOCOL)
            b64encoded = base64.b64encode(pickled)
            # The DB column is expecting a string, so make sure the value is a
            # string, not bytes. Refs #19274.
            if six.PY3:
                b64encoded = b64encoded.decode('latin1')
            try:
                # Note: typecasting for datetimes is needed by some 3rd party
                # database backends. All core backends work without typecasting,
                # so be careful about changes here - test suite will NOT pick
                # regressions.
                with transaction.atomic(using=db):
                    cursor.execute("SELECT cache_key, expires FROM %s "
                                   "WHERE cache_key = %%s" % table, [key])
                    result = cursor.fetchone()

                    if result:
                        current_expires = result[1]
                        expression = models.Expression(output_field=models.DateTimeField())
                        for converter in (connection.ops.get_db_converters(expression) +
                                          expression.get_db_converters(connection)):
                            current_expires = converter(current_expires, expression, connection, {})

                    exp = connection.ops.adapt_datetimefield_value(exp)
                    if result and (mode == 'set' or (mode == 'add' and current_expires < now)):
                        cursor.execute("UPDATE %s SET value = %%s, expires = %%s "
                                       "WHERE cache_key = %%s" % table,
                                       [b64encoded, exp, key])
                    else:
                        cursor.execute("INSERT INTO %s (cache_key, value, expires) "
                                       "VALUES (%%s, %%s, %%s)" % table,
                                       [key, b64encoded, exp])
            except DatabaseError:
                # To be threadsafe, updates/inserts are allowed to fail silently
                return False
            else:
                return True
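
Common to every variant above is how the cached value is serialized before the INSERT/UPDATE runs: pickled at the highest protocol, base64-encoded, and (on Python 3) decoded to text because the value column is a character column. A self-contained sketch of that round trip, independent of any of the listed projects:

import base64
import pickle

value = {'answer': 42}

# What _base_set() stores in the `value` column...
pickled = pickle.dumps(value, pickle.HIGHEST_PROTOCOL)
b64encoded = base64.b64encode(pickled).decode('latin1')

# ...and the inverse used when the value is read back from the table.
restored = pickle.loads(base64.b64decode(b64encoded.encode('latin1')))
assert restored == value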