Python elasticsearch module: ElasticsearchException() example source code

We extracted the following 30 code examples from open-source Python projects to illustrate how elasticsearch.ElasticsearchException() is used.
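All of the project excerpts below share the same basic pattern: wrap a client call in try/except and catch elasticsearch.ElasticsearchException, the base class of the exceptions raised by the elasticsearch-py client (ConnectionError, NotFoundError, TransportError, etc. are subclasses). The following is only a minimal sketch of that pattern; the host URL, index name, and count_documents helper are illustrative placeholders and are not taken from any of the projects below.

from elasticsearch import Elasticsearch, ElasticsearchException

# Placeholder cluster address; adjust to your own deployment.
es = Elasticsearch(['http://localhost:9200'])

def count_documents(index='my-index'):
    """ Return the number of documents in an index, or False on any client error """
    try:
        res = es.count(index=index)
        return res['count']
    except ElasticsearchException as err:
        # ElasticsearchException also covers its subclasses, e.g.
        # ConnectionError, NotFoundError and TransportError.
        print('ElasticSearch error: %s' % err)
    return False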

Project: elasticsearch2-haystack    Author: NDevox    | Project source | File source
def setup():
    try:
        from elasticsearch import Elasticsearch, ElasticsearchException
    except ImportError:
        raise unittest.SkipTest("elasticsearch-py not installed.")

    es = Elasticsearch(settings.HAYSTACK_CONNECTIONS['elasticsearch']['URL'])
    try:
        es.info()
    except ElasticsearchException as e:
        raise unittest.SkipTest("elasticsearch not running on %r" % settings.HAYSTACK_CONNECTIONS['elasticsearch']['URL'], e)

    global test_runner
    global old_config

    from django.test.runner import DiscoverRunner

    test_runner = DiscoverRunner()
    test_runner.setup_test_environment()
    old_config = test_runner.setup_databases()
Project: partycrasher    Author: naturalness    | Project source | File source
def search(self, body, **kwargs):
        assert 'index' not in kwargs
        if not isinstance(body, string_types):
            body = elastify(body)
        tries = 0
        while True:
            tries += 1
            try:
                return self.esstore.es.search(
                    index=self.index_base,
                    body=body,
                    **kwargs)
            except ElasticsearchException as e:
                if tries <= 1:
                    self.esstore.yellow()
                else:
                    raise ESError(e)
Project: graph-data-experiment    Author: occrp-attic    | Project source | File source
def load_records(task, dataset_name, query_idx, records):
    """Load a single batch of QUEUE_PAGE records from the given query."""
    dataset = model.get_dataset(dataset_name)
    items = []
    for record in records:
        for item in map_record(dataset.queries[query_idx], record):
            items.append(item)

    try:
        index_items(items)
    except ElasticsearchException as exc:
        time.sleep(30)
        raise task.retry(exc=exc, countdown=30, max_retries=5)

    log.info("[%r] Indexed %s records as %s documents...",
             dataset_name, len(records), len(items))
Project: django-haystack-elasticsearch    Author: CraveFood    | Project source | File source
def setup():
    log = logging.getLogger('haystack')
    try:
        import elasticsearch
        if not ((1, 0, 0) <= elasticsearch.__version__ < (2, 0, 0)):
            raise ImportError
        from elasticsearch import Elasticsearch, ElasticsearchException
    except ImportError:
        log.error("elasticsearch-py not installed.", exc_info=True)
        raise unittest.SkipTest("elasticsearch-py not installed.")

    es = Elasticsearch(settings.HAYSTACK_CONNECTIONS['default']['URL'])
    try:
        es.info()
    except ElasticsearchException as e:
        log.error("elasticsearch not running on %r" % \
                       settings.HAYSTACK_CONNECTIONS['default']['URL'], exc_info=True)
        raise unittest.SkipTest("elasticsearch not running on %r" % \
                                settings.HAYSTACK_CONNECTIONS['default']['URL'], e)
Project: stackstorm-elasticsearch    Author: StackStorm-Exchange    | Project source | File source
def simple_search(self):
        """Perform URI-based request search.
        """
        accepted_params = ('q', 'df', 'default_operator', 'from', 'size')
        kwargs = {k: self.config[k] for k in accepted_params if self.config[k]}
        wl = curator.IndexList(self.client)
        indices = ','.join(wl.working_list())

        try:
            result = self.client.search(index=indices, **kwargs)
        except elasticsearch.ElasticsearchException as e:
            logger.error(str(e))
            sys.exit(2)

        if self._return_object:
            return True, result
        else:
            self._pp_exit(result)
            return None
Project: stackstorm-elasticsearch    Author: StackStorm-Exchange    | Project source | File source
def full_search(self):
        """Perform search using Query DSL.
        """
        accepted_params = ('from', 'size')
        kwargs = {k: self.config[k] for k in accepted_params if self.config[k]}
        try:
            result = self.client.search(index=self.config.index,
                                        body=self.config.body, **kwargs)
        except elasticsearch.ElasticsearchException as e:
            logger.error(str(e))
            sys.exit(2)

        if self._return_object:
            return True, result
        else:
            self._pp_exit(result)
            return None
Project: PEBA    Author: dtag-dev-sec    | Project source | File source
def getNumberAlerts(timeframe, clientDomain):
    ''' retrieves number of alerts from index in timeframe (minutes)'''
    try:
        res = es.search(index=esindex, body={
            "query": {
                "bool": {
                    "must": [
                        {
                            "match": {
                                "clientDomain": clientDomain
                            }
                        }
                    ],
                    "filter": [
                        {
                            "range": {
                                "createTime": {
                                    "gte": "now-"+str(timeframe)+"m"
                                }
                            }
                        }
                    ]
                }
            },
            "size": 0
        })
        return res['hits']['total']
    except ElasticsearchException as err:
        print('ElasticSearch error: %s' % err)

    return False

########################
### Functions to SET data
########################
Project: PEBA    Author: dtag-dev-sec    | Project source | File source
def queryAlertsWithoutIP(maxAlerts, clientDomain):
    """ Get IP addresses from alerts in elasticsearch """
    try:
        res = es.search(index=esindex, body={
            "query": {
                "match": {
                    "clientDomain": clientDomain
                }
            },
            "sort": {
                "recievedTime": {
                    "order": "desc"
                    }
                },
            "size": maxAlerts,
            "_source": [
                "createTime",
                "peerType",
                "country",
                "originalRequestString",
                "location",
                "targetCountry",
                "countryName",
                "locationDestination",
                "recievedTime",
                "username",
                "password",
                "login"
                ]
            })
        return res["hits"]["hits"]
    except ElasticsearchException as err:
        print('ElasticSearch error: %s' % err)

    return False
Project: PEBA    Author: dtag-dev-sec    | Project source | File source
def queryAlerts(maxAlerts, clientDomain):
    """ Get IP addresses from alerts in elasticsearch """
    try:
        res = es.search(index=app.config['ELASTICINDEX'], body={
            "query": {
                "match": {
                    "clientDomain": clientDomain
                }
            },
            "sort": {
                "recievedTime": {
                    "order": "desc"
                    }
                },
            "size": maxAlerts,
            "_source": [
                "createTime",
                "recievedTime",
                "peerIdent",
                "peerType",
                "country",
                "targetCountry",
                "originalRequestString",
                "location",
                "sourceEntryIp"
                ]
            })
        return res["hits"]["hits"]
    except ElasticsearchException as err:
        app.logger.error('ElasticSearch error: %s' %  err)

    return False
Project: PEBA    Author: dtag-dev-sec    | Project source | File source
def queryAlertsWithoutIP(maxAlerts, clientDomain):
    """ Get IP addresses from alerts in elasticsearch """
    try:
        res = es.search(index=app.config['ELASTICINDEX'], body={
            "query": {
                "match": {
                    "clientDomain": clientDomain
                }
            },
            "sort": {
                "recievedTime": {
                    "order": "desc"
                    }
                },
            "size": maxAlerts,
            "_source": [
                "createTime",
                "peerType",
                "country",
                "originalRequestString",
                "location",
                "targetCountry",
                "countryName",
                "locationDestination",
                "recievedTime",
                "username",
                "password",
                "login"
                ]
            })
        return res["hits"]["hits"]
    except ElasticsearchException as err:
        app.logger.error('ElasticSearch error: %s' % err)

    return False
Project: PEBA    Author: dtag-dev-sec    | Project source | File source
def queryAlertsCount(timeframe, clientDomain):
    """ Get number of Alerts in timeframe in elasticsearch """

    # check if timespan = d or number
    if timeframe == "day":
        span = "now/d"
    elif timeframe.isdecimal():
        span = "now-%sm" % timeframe
    else:
        app.logger.error('Non numeric value in retrieveAlertsCount timespan. Must be decimal number (in minutes) or string "day"')
        return False

    try:
        res = es.search(index=app.config['ELASTICINDEX'], body={
          "query": {
            "bool": {
              "must": [
                {
                  "match": {
                    "clientDomain": clientDomain
                  }
                }
              ],
              "filter": [
                {
                  "range": {
                    "recievedTime": {
                        "gte": str(span)
                    }
                  }
                }
              ]
            }
          },
          "size": 0
        })
        return res['hits']['total']
    except ElasticsearchException as err:
        app.logger.error('ElasticSearch error: %s' %  err)

    return False
Project: kinto-elasticsearch    Author: Kinto    | Project source | File source
def test_cli_logs_elasticsearch_exceptions(self):
        indexer = mock.MagicMock()
        indexer.bulk().__enter__().index_record.side_effect = elasticsearch.ElasticsearchException

        with mock.patch('kinto_elasticsearch.command_reindex.logger') as logger:
            with mock.patch('kinto_elasticsearch.command_reindex.get_paginated_records',
                            return_value=[[{}, {}]]) as get_paginated_records:
                reindex_records(indexer,
                                mock.sentinel.storage,
                                mock.sentinel.bucket_id,
                                mock.sentinel.collection_id)
                get_paginated_records.assert_called_with(mock.sentinel.storage,
                                                         mock.sentinel.bucket_id,
                                                         mock.sentinel.collection_id)
                logger.exception.assert_called_with('Failed to index record')
Project: kinto-elasticsearch    Author: Kinto    | Project source | File source
def test_response_is_served_if_indexer_fails(self):
        with mock.patch("kinto_elasticsearch.indexer.elasticsearch.helpers.bulk",
                        side_effect=elasticsearch.ElasticsearchException):
            r = self.app.post_json("/buckets/bid/collections/cid/records",
                                   {"data": {"hola": "mundo"}},
                                   headers=self.headers)
            assert r.status_code == 201
Project: kinto-elasticsearch    Author: Kinto    | Project source | File source
def test_search_response_is_empty_if_indexer_fails(self):
        with mock.patch("kinto_elasticsearch.indexer.Indexer.search",
                        side_effect=elasticsearch.ElasticsearchException):
            resp = self.app.post("/buckets/bid/collections/cid/search",
                                 headers=self.headers)
            result = resp.json
            assert result == {}
Project: kinto-elasticsearch    Author: Kinto    | Project source | File source
def reindex_records(indexer, storage, bucket_id, collection_id):
    total = 0
    for records in get_paginated_records(storage, bucket_id, collection_id):
        try:
            with indexer.bulk() as bulk:
                for record in records:
                    bulk.index_record(bucket_id,
                                      collection_id,
                                      record=record)
                print(".", end="")
            total += len(bulk.operations)
        except elasticsearch.ElasticsearchException:
            logger.exception("Failed to index record")
    print("\n%s records reindexed." % total)
Project: mod-elastic-logs    Author: descrepes    | Project source | File source
def create_index(self, index):
        try:
            logger.debug("[elastic-logs] Creating index %s ...", index)
            self.es.indices.create(index)
        except ElasticsearchException as exp:
            logger.error("[elastic-logs] exception while creating index %s: %s", index, str(exp))
Project: mod-elastic-logs    Author: descrepes    | Project source | File source
def is_index_exists(self, index):
    if self.is_connected != CONNECTED:
        try:
            if self.es.indices.exists(index):
                return True
            else:
                return False
        except ElasticsearchException as exp:
            logger.error("[elastic-logs] exception while checking the existence of the index %s: %s", index, str(exp))

    return True
Project: querygraph    Author: peter-woyzbun    | Project source | File source
def __init__(self, name, host, port, doc_type, index):
        self.host = host
        self.port = port
        self.doc_type = doc_type
        self.index = index
        DatabaseInterface.__init__(self,
                                   name=name,
                                   db_type='ElasticSearch',
                                   conn_exception=elasticsearch.ConnectionError,
                                   execution_exception=elasticsearch.ElasticsearchException,
                                   type_converter=self.TYPE_CONVERTER,
                                   deserialize_query=True)
Project: asynces    Author: fabregas    | Project source | File source
async def wait_es(cls):
        es = AsyncElasticsearch(
            ['{}:{}'.format(cls.es_host, cls.es_port)], loop=cls.loop)
        for i in range(40):
            try:
                await es.ping()
            except elasticsearch.ElasticsearchException:
                await asyncio.sleep(0.5, loop=cls.loop)
            else:
                es.close()
                break

        else:
            raise RuntimeError("es connection error")
Project: PEBA    Author: dtag-dev-sec    | Project source | File source
def queryAlertStats(clientDomain):
    """ Get combined statistics from elasticsearch """
    try:
        res = es.search(index=esindex, body={
            "aggs": {
                "communityfilter": {
                    "filter": {
                        "term": {
                            "clientDomain": clientDomain
                        }
                    },
                    "aggs": {
                        "ctr": {
                            "range": {
                                "field": "createTime",
                                "ranges": [
                                    {"key": "1d", "from": "now-1440m"},
                                    {"key": "1h", "from": "now-60m"},
                                    {"key": "5m", "from": "now-5m"},
                                    {"key": "1m", "from": "now-1m"}
                                ]
                            }
                        }
                    }
                }
            },
            "size": 0
        })
        return res['aggregations']['communityfilter']['ctr']['buckets']
    except ElasticsearchException as err:
        print('ElasticSearch error: %s' % err)

    return False
Project: PEBA    Author: dtag-dev-sec    | Project source | File source
def queryAlertsCountWithType(timeframe, clientDomain):
    """ Get number of Alerts in timeframe in elasticsearch """

    # check if timespan = d or number
    if timeframe == "day":
        span = "now/d"
    elif timeframe.isdecimal():
        span = "now-%sm" % timeframe
    else:
        print('Non numeric value in retrieveAlertsCountWithType timespan. Must be decimal number (in minutes) or string "day"')
        return False

    try:
        res = es.search(index=esindex, body={
          "query": {
            "range": {
              "createTime": {
                  "gte": str(span)
              }
            }
          },
          "aggs": {
            "communityfilter": {
              "filter": {
                "term": {
                  "clientDomain": clientDomain
                }
              },
              "aggs": {
                "honeypotTypes": {
                  "terms": {
                    "field": "peerType.keyword"
                  }
                }
              }
            }
          },
          "size": 0
        })
        return res
    except ElasticsearchException as err:
        print('ElasticSearch error: %s' %  err)

    return False
Project: PEBA    Author: dtag-dev-sec    | Project source | File source
def authenticate(username, token):
    """ Authenticate user from cache or in ES """

    # check for user in cache
    authtoken = getCache(username, "user")
    if authtoken is not False:
        if len(authtoken) == 128:
            tokenhash = hashlib.sha512(token.encode('utf-8')).hexdigest()
            if authtoken == tokenhash:
                return True
        elif len(authtoken) == 32:
            tokenhash = hashlib.md5(token.encode('utf-8')).hexdigest()
            if authtoken == tokenhash:
                return True
        else:
            app.logger.error('authenticate(): Hash "{0}" for user "{1}" is not matching md5 or sha512 length! Needs to be checked in memcache!'.format(authtoken, username))

    # query ES
    else:
        try:
            res = es.search(index=app.config['WSUSERINDEX'], body={
                  "query": {
                    "term": {
                      "peerName.keyword": username
                    }
                  }
                })

            if res["hits"]["total"] > 1:
                app.logger.error('authenticate(): More than one user "%s" in ES index "users" found!' % username)
            elif res["hits"]["total"] < 1:
                app.logger.error('authenticate(): No user "%s" in ES index "users" found!' % username)
            elif res["hits"]["total"] == 1:
                authtoken = res["hits"]["hits"][0]["_source"]["token"]
                getOnly = res["hits"]["hits"][0]["_source"]["getOnly"]
                community = res["hits"]["hits"][0]["_source"]["community"]

                if len(authtoken) == 128:
                    tokenhash = hashlib.sha512(token.encode('utf-8')).hexdigest()
                    if authtoken == tokenhash:
                        # add user and token to cache for 24h
                        setCache(username, authtoken, (60 * 60 * 24), "user")
                        return True
                elif len(authtoken) == 32:
                    tokenhash = hashlib.md5(token.encode('utf-8')).hexdigest()
                    if authtoken == tokenhash:
                        # add user and token to cache for 24h
                        setCache(username, authtoken, (60 * 60 * 24),"user")
                        return True
                else:
                    app.logger.error('authenticate(): Hash "{0}" for user "{1}" is not matching md5 or sha512 length! Needs to be checked in ES index!'.format(authtoken, username))
                    return False

        except ElasticsearchException as err:
            app.logger.error('ElasticSearch error: %s' %  err)

    return False
Project: PEBA    Author: dtag-dev-sec    | Project source | File source
def queryAlertsCountWithType(timeframe, clientDomain):
    """ Get number of Alerts in timeframe in elasticsearch """

    # check if timespan = d or number
    if timeframe == "day":
        span = "now/d"
    elif timeframe.isdecimal():
        span = "now-%sm" % timeframe
    else:
        app.logger.error('Non numeric value in retrieveAlertsCountWithType timespan. Must be decimal number (in minutes) or string "day"')
        return False

    try:
        res = es.search(index=app.config['ELASTICINDEX'], body={
          "query": {
            "range": {
              "recievedTime": {
                  "gte": str(span)
              }
            }
          },
          "aggs": {
            "communityfilter": {
              "filter": {
                "term": {
                  "clientDomain": clientDomain
                }
              },
              "aggs": {
                "honeypotTypes": {
                  "terms": {
                    "field": "peerType.keyword"
                  }
                }
              }
            }
          },
          "size": 0
        })
        return res
    except ElasticsearchException as err:
        app.logger.error('ElasticSearch error: %s' %  err)

    return False
Project: PEBA    Author: dtag-dev-sec    | Project source | File source
def queryDatasetAlertsPerMonth(days, clientDomain):
    # check if months is a number
    if days is None:
        span = "now-1M/d"
    elif days.isdecimal():
        span = "now-%sd/d" % days
    else:
        app.logger.error('Non numeric value in datasetAlertsPerMonth timespan. Must be decimal number in days')
        return False

    try:
        res = es.search(index=app.config['ELASTICINDEX'], body={
            "query": {
                "range": {
                    "createTime": {
                        "gte": str(span)
                    }
                }
            },
            "aggs": {
                "communityfilter": {
                    "filter": {
                        "term": {
                            "clientDomain": clientDomain
                        }
                    },
                    "aggs": {
                        "range": {
                            "date_histogram": {
                                "field": "createTime",
                                "interval": "day"
                            }
                        }
                    }
                }
            },
            "size": 0
        })
        return res["aggregations"]["communityfilter"]["range"]
    except ElasticsearchException as err:
        app.logger.error('ElasticSearch error: %s' %  err)

    return False
Project: PEBA    Author: dtag-dev-sec    | Project source | File source
def queryAlertStats(clientDomain):
    """ Get combined statistics from elasticsearch """
    try:
        res = es.search(index=app.config['ELASTICINDEX'], body={
            "aggs": {
                "communityfilter": {
                    "filter": {
                        "term": {
                            "clientDomain": clientDomain
                        }
                    },
                    "aggs": {
                        "ctr": {
                            "range": {
                                "field": "createTime",
                                "ranges": [
                                    {"key": "1d", "from": "now-1440m"},
                                    {"key": "1h", "from": "now-60m"},
                                    {"key": "5m", "from": "now-5m"},
                                    {"key": "1m", "from": "now-1m"}
                                ]
                            }
                        }
                    }
                }
            },
            "size": 0
        })
        return res['aggregations']['communityfilter']['ctr']['buckets']
    except ElasticsearchException as err:
        app.logger.error('ElasticSearch error: %s' % err)

    return False
Project: PEBA    Author: dtag-dev-sec    | Project source | File source
def queryForSingleIP(maxAlerts, ip, clientDomain):
    """ Get data for specific IP addresse from elasticsearch """
    try:
        ipaddress.IPv4Address(ip)
        if not ipaddress.ip_address(ip).is_global:
            app.logger.debug('No global IP address given on /querySingleIP: %s' % str(request.args.get('ip')))
            return False

    except:
        app.logger.debug('No valid IP given on /querySingleIP: %s' % str(request.args.get('ip')))
        return False

    try:
        res = es.search(index=app.config['ELASTICINDEX'], body={
          "query": {
            "bool": {
              "must": [
                {
                  "term": {
                    "sourceEntryIp": str(ip)
                  }
                },
                {
                  "term": {
                    "clientDomain": clientDomain
                  }
                }
              ]
            }
          },
          "size": maxAlerts,
          "sort": {
            "createTime": {
              "order": "desc"
            }
          },
          "_source": [
            "createTime",
            "peerType",
            "targetCountry",
            "originalRequestString"
          ]
        })
        return res["hits"]["hits"]
    except ElasticsearchException as err:
        app.logger.error('ElasticSearch error: %s' %  err)

    return False

# Formatting functions
Project: kinto-elasticsearch    Author: Kinto    | Project source | File source
def search_view(request, **kwargs):
    bucket_id = request.matchdict['bucket_id']
    collection_id = request.matchdict['collection_id']

    # Limit the number of results to return, based on existing Kinto settings.
    paginate_by = request.registry.settings.get("paginate_by")
    max_fetch_size = request.registry.settings["storage_max_fetch_size"]
    if paginate_by is None or paginate_by <= 0:
        paginate_by = max_fetch_size
    configured = min(paginate_by, max_fetch_size)
    # If the size is specified in query, ignore it if larger than setting.
    specified = None
    if "body" in kwargs:
        try:
            body = json.loads(kwargs["body"].decode("utf-8"))
            specified = body.get("size")
        except json.decoder.JSONDecodeError:
            pass
    if specified is None or specified > configured:
        kwargs.setdefault("size", configured)

    # Access indexer from views using registry.
    indexer = request.registry.indexer
    try:
        results = indexer.search(bucket_id, collection_id, **kwargs)

    except elasticsearch.NotFoundError as e:
        # If plugin was enabled after the creation of the collection.
        indexer.create_index(bucket_id, collection_id)
        results = indexer.search(bucket_id, collection_id, **kwargs)

    except elasticsearch.RequestError as e:
        # Malformed query.
        if isinstance(e.info["error"], dict):
            message = e.info["error"]["reason"]
            details = e.info["error"]["root_cause"][0]
        else:
            message = e.info["error"]
            details = None
        response = http_error(httpexceptions.HTTPBadRequest(),
                              errno=ERRORS.INVALID_PARAMETERS,
                              message=message,
                              details=details)
        raise response

    except elasticsearch.ElasticsearchException as e:
        # General failure.
        logger.exception("Index query failed.")
        results = {}

    return results
Project: mod-elastic-logs    Author: descrepes    | Project source | File source
def commit_logs(self):
        """
        Periodically called (commit_period), this method prepares a batch of queued logs (commit_volume) and inserts them into the index
        """
        if not self.logs_cache:
            return

        if not self.is_connected == CONNECTED:
            if not self.open():
                logger.warning("[elastic-logs] log commiting failed")
                logger.warning("[elastic-logs] %d lines to insert in the index", len(self.logs_cache))
                return

        logger.debug("[elastic-logs] commiting ...")

        logger.debug("[elastic-logs] %d lines to insert in the index (max insertion is %d lines)", len(self.logs_cache), self.commit_volume)

        # Flush all the stored log lines
        logs_to_commit = 1
        now = time.time()
        some_logs = []
        while True:
            try:
                # result = self.db[self.logs_collection].insert_one(self.logs_cache.popleft())
                some_logs.append(self.logs_cache.popleft())
                logs_to_commit = logs_to_commit + 1
                if logs_to_commit >= self.commit_volume:
                    break
            except IndexError:
                logger.debug("[elastic-logs] prepared all available logs for commit")
                break
            except Exception as exp:
                logger.error("[elastic-logs] exception: %s", str(exp))

        logger.debug("[elastic-logs] time to prepare %s logs for commit (%2.4f)", logs_to_commit, time.time() - now)

        now = time.time()
        try:
            # Insert lines to commit
            result = helpers.bulk(self.es,some_logs,self.commit_volume)
            logger.debug("[elastic-logs] inserted %d logs.", result)

        except ElasticsearchException as exp:
            self.close()
            logger.error("[elastic-logs] Error occurred when committing: %s", exp)

        logger.debug("[elastic-logs] time to insert %s logs (%2.4f)", logs_to_commit, time.time() - now)
Project: georef-api    Author: datosgobar    | Project source | File source
def query_streets(name=None, locality=None, department=None, state=None,
                  road=None, max=None, fields=[]):
    """Busca calles según parámetros de búsqueda de una consulta.

    Args:
        name (str): Nombre de la calle para filtrar (opcional).
        locality (str): Nombre de la localidad para filtrar (opcional).
        department (str): Nombre de departamento para filtrar (opcional).
        state (str): ID o nombre de provincia para filtrar (opcional).
        road_type (str): Nombre del tipo de camino para filtrar (opcional).
        max (int): Limita la cantidad de resultados (opcional).
        fields (list): Campos a devolver en los resultados (opcional).

    Returns:
        list: Resultados de búsqueda de calles.
    """
    index = 'calles-*'  # Search in all indexes by default.
    terms = []
    if name:
        condition = build_condition(NAME, get_abbreviated(name), fuzzy=True)
        terms.append(condition)
    if road:
        condition = build_condition(ROAD_TYPE, road, fuzzy=True)
        terms.append(condition)
    if locality:
        condition = build_condition(LOCALITY, locality, fuzzy=True)
        terms.append(condition)
    if department:
        condition = build_condition(DEPT, department, fuzzy=True)
        terms.append(condition)
    if state:
        target_state = query_entity(STATES, state, max=1)
        if target_state:  # Narrows search to specific index.
            index = '-'.join([STREETS, target_state[0][ID]])
    if LOCATION in fields:
        fields.extend([GEOM, START_R, START_L, END_R, END_L, FULL_NAME])

    query = {'query': {'bool': {'must': terms}} if terms else {"match_all": {}},
             'size': max or 10, '_source': fields}
    try:
        result = Elasticsearch().search(index=index, body=query)
    except ElasticsearchException as error:
        return []

    return [parse_es(hit) for hit in result['hits']['hits']]
Project: georef-api    Author: datosgobar    | Project source | File source
def query_entity(index, entity_id=None, name=None, department=None, state=None,
                 max=None, order=None, fields=[], flatten=False):
    """Busca entidades políticas (localidades, departamentos, o provincias)
        según parámetros de búsqueda de una consulta.

    Args:
        index (str): Nombre del índice sobre el cual realizar la búsqueda.
        name (str): Nombre del tipo de entidad (opcional).
        department (str): ID o nombre de departamento para filtrar (opcional).
        state (str): ID o nombre de provincia para filtrar (opcional).
        max (int): Limita la cantidad de resultados (opcional).
        order (str): Campo por el cual ordenar los resultados (opcional).
        fields (list): Campos a devolver en los resultados (opcional).

    Returns:
        list: Resultados de búsqueda de entidades.
    """
    terms = []
    sorts = {}
    if entity_id:
        condition = build_condition(ID, entity_id)
        terms.append(condition)
    if name:
        condition = build_condition(NAME, name, fuzzy=True)
        terms.append(condition)
    if department:
        if department.isdigit():
            condition = build_condition(DEPT_ID, department)
        else:
            condition = build_condition(DEPT_NAME, department, fuzzy=True)
        terms.append(condition)
    if state:
        if state.isdigit():
            condition = build_condition(STATE_ID, state)
        else:
            if len(state.split()) == 1:
                condition = build_condition(STATE_NAME, state, fuzzy=True)
            else:
                condition = build_condition(STATE_NAME, state,
                                        kind='match_phrase_prefix')
        terms.append(condition)
    if order:
        if ID in order: sorts[ID_KEYWORD] = {'order': 'asc'}
        if NAME in order: sorts[NAME_KEYWORD] = {'order': 'asc'}

    query = {'query': {'bool': {'must': terms}} if terms else {"match_all": {}},
             'size': max or 10, 'sort': sorts, '_source': fields}
    try:
        result = Elasticsearch().search(index=index, body=query)
    except ElasticsearchException as error:
        return []

    return [parse_entity(hit, flatten) for hit in result['hits']['hits']]