Example source code for the Java class org.apache.lucene.index.PointValues

Project: elasticsearch_my    File: NumberFieldMapper.java
@Override
FieldStats.Double stats(IndexReader reader, String fieldName,
                        boolean isSearchable, boolean isAggregatable) throws IOException {
    // No merged FieldInfo means the field does not exist in this reader: no stats.
    final FieldInfo fieldInfo =
        org.apache.lucene.index.MultiFields.getMergedFieldInfos(reader).fieldInfo(fieldName);
    if (fieldInfo == null) {
        return null;
    }
    final int maxDoc = reader.maxDoc();
    final long pointCount = PointValues.size(reader, fieldName);
    if (pointCount == 0) {
        // Field is mapped but carries no point values: report empty stats.
        return new FieldStats.Double(maxDoc, 0, -1, -1, isSearchable, isAggregatable);
    }
    final int docsWithValue = PointValues.getDocCount(reader, fieldName);
    final byte[] minPacked = PointValues.getMinPackedValue(reader, fieldName);
    final byte[] maxPacked = PointValues.getMaxPackedValue(reader, fieldName);
    // Decode the packed BKD bounds as half-floats for the min/max of the stats.
    return new FieldStats.Double(maxDoc, docsWithValue, -1L, pointCount,
        isSearchable, isAggregatable,
        HalfFloatPoint.decodeDimension(minPacked, 0), HalfFloatPoint.decodeDimension(maxPacked, 0));
}
Project: elasticsearch_my    File: NumberFieldMapper.java
@Override
FieldStats.Double stats(IndexReader reader, String fieldName,
                        boolean isSearchable, boolean isAggregatable) throws IOException {
    // Bail out when the field is unknown to the merged field infos.
    final FieldInfo fieldInfo =
        org.apache.lucene.index.MultiFields.getMergedFieldInfos(reader).fieldInfo(fieldName);
    if (fieldInfo == null) {
        return null;
    }
    final long pointCount = PointValues.size(reader, fieldName);
    if (pointCount == 0) {
        // Mapped field without any indexed points: empty stats.
        return new FieldStats.Double(reader.maxDoc(), 0, -1, -1, isSearchable, isAggregatable);
    }
    final int docsWithValue = PointValues.getDocCount(reader, fieldName);
    final byte[] minPacked = PointValues.getMinPackedValue(reader, fieldName);
    final byte[] maxPacked = PointValues.getMaxPackedValue(reader, fieldName);
    // Min/max come from the packed BKD bounds, decoded as 32-bit floats.
    return new FieldStats.Double(reader.maxDoc(), docsWithValue, -1L, pointCount,
        isSearchable, isAggregatable,
        FloatPoint.decodeDimension(minPacked, 0), FloatPoint.decodeDimension(maxPacked, 0));
}
Project: elasticsearch_my    File: NumberFieldMapper.java
@Override
FieldStats.Double stats(IndexReader reader, String fieldName,
                        boolean isSearchable, boolean isAggregatable) throws IOException {
    // Unknown field in the merged field infos: nothing to report.
    final FieldInfo fieldInfo =
        org.apache.lucene.index.MultiFields.getMergedFieldInfos(reader).fieldInfo(fieldName);
    if (fieldInfo == null) {
        return null;
    }
    final int maxDoc = reader.maxDoc();
    final long pointCount = PointValues.size(reader, fieldName);
    if (pointCount == 0) {
        // Field is mapped but no points were indexed: empty stats.
        return new FieldStats.Double(maxDoc, 0, -1, -1, isSearchable, isAggregatable);
    }
    final int docsWithValue = PointValues.getDocCount(reader, fieldName);
    final byte[] minPacked = PointValues.getMinPackedValue(reader, fieldName);
    final byte[] maxPacked = PointValues.getMaxPackedValue(reader, fieldName);
    // Decode the packed BKD bounds as 64-bit doubles for the stats min/max.
    return new FieldStats.Double(maxDoc, docsWithValue, -1L, pointCount,
        isSearchable, isAggregatable,
        DoublePoint.decodeDimension(minPacked, 0), DoublePoint.decodeDimension(maxPacked, 0));
}
Project: elasticsearch_my    File: NumberFieldMapper.java
@Override
FieldStats.Long stats(IndexReader reader, String fieldName,
                      boolean isSearchable, boolean isAggregatable) throws IOException {
    // Field absent from the merged field infos: no stats to compute.
    final FieldInfo fieldInfo =
        org.apache.lucene.index.MultiFields.getMergedFieldInfos(reader).fieldInfo(fieldName);
    if (fieldInfo == null) {
        return null;
    }
    final long pointCount = PointValues.size(reader, fieldName);
    if (pointCount == 0) {
        // Mapped but empty field: return empty stats.
        return new FieldStats.Long(reader.maxDoc(), 0, -1, -1, isSearchable, isAggregatable);
    }
    final int docsWithValue = PointValues.getDocCount(reader, fieldName);
    final byte[] minPacked = PointValues.getMinPackedValue(reader, fieldName);
    final byte[] maxPacked = PointValues.getMaxPackedValue(reader, fieldName);
    // Values are encoded as 32-bit ints on disk; FieldStats.Long widens them.
    return new FieldStats.Long(reader.maxDoc(), docsWithValue, -1L, pointCount,
        isSearchable, isAggregatable,
        IntPoint.decodeDimension(minPacked, 0), IntPoint.decodeDimension(maxPacked, 0));
}
Project: elasticsearch_my    File: NumberFieldMapper.java
@Override
FieldStats.Long stats(IndexReader reader, String fieldName,
                      boolean isSearchable, boolean isAggregatable) throws IOException {
    // No FieldInfo means the field does not exist here.
    final FieldInfo fieldInfo =
        org.apache.lucene.index.MultiFields.getMergedFieldInfos(reader).fieldInfo(fieldName);
    if (fieldInfo == null) {
        return null;
    }
    final int maxDoc = reader.maxDoc();
    final long pointCount = PointValues.size(reader, fieldName);
    if (pointCount == 0) {
        // Field is mapped but has no indexed points.
        return new FieldStats.Long(maxDoc, 0, -1, -1, isSearchable, isAggregatable);
    }
    final int docsWithValue = PointValues.getDocCount(reader, fieldName);
    final byte[] minPacked = PointValues.getMinPackedValue(reader, fieldName);
    final byte[] maxPacked = PointValues.getMaxPackedValue(reader, fieldName);
    // Decode the packed BKD bounds as 64-bit longs for the stats min/max.
    return new FieldStats.Long(maxDoc, docsWithValue, -1L, pointCount,
        isSearchable, isAggregatable,
        LongPoint.decodeDimension(minPacked, 0), LongPoint.decodeDimension(maxPacked, 0));
}
Project: elasticsearch_my    File: DateFieldMapper.java
@Override
public FieldStats.Date stats(IndexReader reader) throws IOException {
    final String field = name();
    // Field absent from the merged field infos: no stats available.
    final FieldInfo fieldInfo =
        org.apache.lucene.index.MultiFields.getMergedFieldInfos(reader).fieldInfo(field);
    if (fieldInfo == null) {
        return null;
    }
    final long pointCount = PointValues.size(reader, field);
    if (pointCount == 0) {
        // Mapped date field with no indexed points: empty stats.
        return new FieldStats.Date(reader.maxDoc(), 0, -1, -1, isSearchable(), isAggregatable());
    }
    final int docsWithValue = PointValues.getDocCount(reader, field);
    final byte[] minPacked = PointValues.getMinPackedValue(reader, field);
    final byte[] maxPacked = PointValues.getMaxPackedValue(reader, field);
    // Dates are stored as epoch-millis longs; the formatter renders the bounds.
    return new FieldStats.Date(reader.maxDoc(), docsWithValue, -1L, pointCount,
        isSearchable(), isAggregatable(),
        dateTimeFormatter(), LongPoint.decodeDimension(minPacked, 0), LongPoint.decodeDimension(maxPacked, 0));
}
Project: elasticsearch_my    File: GeoPointFieldMapper.java
@Override
public FieldStats.GeoPoint stats(IndexReader reader) throws IOException {
    final String field = name();
    // Field not present in the merged field infos: nothing to report.
    final FieldInfo fieldInfo =
        org.apache.lucene.index.MultiFields.getMergedFieldInfos(reader).fieldInfo(field);
    if (fieldInfo == null) {
        return null;
    }
    final long pointCount = PointValues.size(reader, field);
    if (pointCount == 0) {
        // Mapped geo field without points: empty stats.
        return new FieldStats.GeoPoint(reader.maxDoc(), -1L, -1L, -1L, isSearchable(), isAggregatable());
    }
    final int docsWithValue = PointValues.getDocCount(reader, field);
    final byte[] minPacked = PointValues.getMinPackedValue(reader, field);
    final byte[] maxPacked = PointValues.getMaxPackedValue(reader, field);
    // Packed layout is [lat(4 bytes)][lon(4 bytes)]; decode each half of the bounds.
    final GeoPoint bottomLeft = new GeoPoint(
        GeoEncodingUtils.decodeLatitude(minPacked, 0),
        GeoEncodingUtils.decodeLongitude(minPacked, Integer.BYTES));
    final GeoPoint topRight = new GeoPoint(
        GeoEncodingUtils.decodeLatitude(maxPacked, 0),
        GeoEncodingUtils.decodeLongitude(maxPacked, Integer.BYTES));
    return new FieldStats.GeoPoint(reader.maxDoc(), docsWithValue, -1L, pointCount,
        isSearchable(), isAggregatable(), bottomLeft, topRight);
}
Project: elasticsearch_my    File: IpFieldMapper.java
@Override
public FieldStats.Ip stats(IndexReader reader) throws IOException {
    final String field = name();
    // Unknown field: no stats to compute.
    final FieldInfo fieldInfo =
        org.apache.lucene.index.MultiFields.getMergedFieldInfos(reader).fieldInfo(field);
    if (fieldInfo == null) {
        return null;
    }
    final long pointCount = PointValues.size(reader, field);
    if (pointCount == 0) {
        // Mapped IP field with no indexed points: empty stats.
        return new FieldStats.Ip(reader.maxDoc(), 0, -1, -1, isSearchable(), isAggregatable());
    }
    final int docsWithValue = PointValues.getDocCount(reader, field);
    final byte[] minPacked = PointValues.getMinPackedValue(reader, field);
    final byte[] maxPacked = PointValues.getMaxPackedValue(reader, field);
    // The packed bounds decode directly into InetAddress values.
    return new FieldStats.Ip(reader.maxDoc(), docsWithValue, -1L, pointCount,
        isSearchable(), isAggregatable(),
        InetAddressPoint.decode(minPacked), InetAddressPoint.decode(maxPacked));
}
Project: elasticsearch_my    File: SeqNoFieldMapper.java
@Override
public FieldStats stats(IndexReader reader) throws IOException {
    final String fieldName = name();
    // Note: unlike the other mappers, this one skips the FieldInfo lookup and
    // treats "no points" as "no stats" directly.
    final long pointCount = PointValues.size(reader, fieldName);
    if (pointCount == 0) {
        return null;
    }
    final int docsWithValue = PointValues.getDocCount(reader, fieldName);
    final byte[] minPacked = PointValues.getMinPackedValue(reader, fieldName);
    final byte[] maxPacked = PointValues.getMaxPackedValue(reader, fieldName);
    // Sequence numbers are searchable (true) but not aggregatable (false).
    return new FieldStats.Long(reader.maxDoc(), docsWithValue, -1L, pointCount, true, false,
            LongPoint.decodeDimension(minPacked, 0), LongPoint.decodeDimension(maxPacked, 0));
}
Project: elasticsearch_my    File: DateFieldMapper.java
/**
 * Compares this reader's indexed date range against the query range [from, to]
 * and reports whether the reader's values are fully WITHIN, fully DISJOINT from,
 * or INTERSECTS the query bounds. Used by range-query rewriting to skip or
 * match-all shards cheaply.
 */
@Override
public Relation isFieldWithinQuery(IndexReader reader,
        Object from, Object to, boolean includeLower, boolean includeUpper,
        DateTimeZone timeZone, DateMathParser dateParser, QueryRewriteContext context) throws IOException {
    // Fall back to this mapper's default date-math parser when none is given.
    if (dateParser == null) {
        dateParser = this.dateMathParser;
    }

    long fromInclusive = Long.MIN_VALUE;
    if (from != null) {
        // roundUp == !includeLower: for an exclusive lower bound, round up so the
        // subsequent +1 produces the first truly included millisecond.
        // NOTE(review): relies on parseToMilliseconds' rounding semantics — confirm.
        fromInclusive = parseToMilliseconds(from, !includeLower, timeZone, dateParser, context);
        if (includeLower == false) {
            if (fromInclusive == Long.MAX_VALUE) {
                // Exclusive bound at Long.MAX_VALUE would overflow on +1; nothing can match.
                return Relation.DISJOINT;
            }
            ++fromInclusive;
        }
    }

    long toInclusive = Long.MAX_VALUE;
    if (to != null) {
        toInclusive = parseToMilliseconds(to, includeUpper, timeZone, dateParser, context);
        if (includeUpper == false) {
            if (toInclusive == Long.MIN_VALUE) {
                // Exclusive bound at Long.MIN_VALUE would underflow on -1; nothing can match.
                return Relation.DISJOINT;
            }
            --toInclusive;
        }
    }

    // This check needs to be done after fromInclusive and toInclusive
    // are resolved so we can throw an exception if they are invalid
    // even if there are no points in the shard
    if (PointValues.size(reader, name()) == 0) {
        // no points, so nothing matches
        return Relation.DISJOINT;
    }

    // Indexed min/max for this field, decoded from the packed BKD bounds.
    long minValue = LongPoint.decodeDimension(PointValues.getMinPackedValue(reader, name()), 0);
    long maxValue = LongPoint.decodeDimension(PointValues.getMaxPackedValue(reader, name()), 0);

    // All indexed values inside the query range -> WITHIN; no overlap -> DISJOINT;
    // anything else -> partial overlap, INTERSECTS.
    if (minValue >= fromInclusive && maxValue <= toInclusive) {
        return Relation.WITHIN;
    } else if (maxValue < fromInclusive || minValue > toInclusive) {
        return Relation.DISJOINT;
    } else {
        return Relation.INTERSECTS;
    }
}
Project: search-guard    File: EmptyFilterLeafReader.java
@Override
public PointValues getPointValues(String field) throws IOException {
    // Only meta fields are passed through to the wrapped reader;
    // every other field appears to have no point values.
    if (isMeta(field)) {
        return in.getPointValues(field);
    }
    return null;
}