@Override
FieldStats.Double stats(IndexReader reader, String fieldName,
                        boolean isSearchable, boolean isAggregatable) throws IOException {
    FieldInfo fi = org.apache.lucene.index.MultiFields.getMergedFieldInfos(reader).fieldInfo(fieldName);
    if (fi == null) {
        return null;
    }
    long size = PointValues.size(reader, fieldName);
    if (size == 0) {
        return new FieldStats.Double(reader.maxDoc(), 0, -1, -1, isSearchable, isAggregatable);
    }
    int docCount = PointValues.getDocCount(reader, fieldName);
    byte[] min = PointValues.getMinPackedValue(reader, fieldName);
    byte[] max = PointValues.getMaxPackedValue(reader, fieldName);
    return new FieldStats.Double(reader.maxDoc(), docCount, -1L, size,
        isSearchable, isAggregatable,
        HalfFloatPoint.decodeDimension(min, 0), HalfFloatPoint.decodeDimension(max, 0));
}

@Override
FieldStats.Double stats(IndexReader reader, String fieldName,
                        boolean isSearchable, boolean isAggregatable) throws IOException {
    FieldInfo fi = org.apache.lucene.index.MultiFields.getMergedFieldInfos(reader).fieldInfo(fieldName);
    if (fi == null) {
        return null;
    }
    long size = PointValues.size(reader, fieldName);
    if (size == 0) {
        return new FieldStats.Double(reader.maxDoc(), 0, -1, -1, isSearchable, isAggregatable);
    }
    int docCount = PointValues.getDocCount(reader, fieldName);
    byte[] min = PointValues.getMinPackedValue(reader, fieldName);
    byte[] max = PointValues.getMaxPackedValue(reader, fieldName);
    return new FieldStats.Double(reader.maxDoc(), docCount, -1L, size,
        isSearchable, isAggregatable,
        FloatPoint.decodeDimension(min, 0), FloatPoint.decodeDimension(max, 0));
}

@Override
FieldStats.Double stats(IndexReader reader, String fieldName,
                        boolean isSearchable, boolean isAggregatable) throws IOException {
    FieldInfo fi = org.apache.lucene.index.MultiFields.getMergedFieldInfos(reader).fieldInfo(fieldName);
    if (fi == null) {
        return null;
    }
    long size = PointValues.size(reader, fieldName);
    if (size == 0) {
        return new FieldStats.Double(reader.maxDoc(), 0, -1, -1, isSearchable, isAggregatable);
    }
    int docCount = PointValues.getDocCount(reader, fieldName);
    byte[] min = PointValues.getMinPackedValue(reader, fieldName);
    byte[] max = PointValues.getMaxPackedValue(reader, fieldName);
    return new FieldStats.Double(reader.maxDoc(), docCount, -1L, size,
        isSearchable, isAggregatable,
        DoublePoint.decodeDimension(min, 0), DoublePoint.decodeDimension(max, 0));
}

@Override
FieldStats.Long stats(IndexReader reader, String fieldName,
                      boolean isSearchable, boolean isAggregatable) throws IOException {
    FieldInfo fi = org.apache.lucene.index.MultiFields.getMergedFieldInfos(reader).fieldInfo(fieldName);
    if (fi == null) {
        return null;
    }
    long size = PointValues.size(reader, fieldName);
    if (size == 0) {
        return new FieldStats.Long(reader.maxDoc(), 0, -1, -1, isSearchable, isAggregatable);
    }
    int docCount = PointValues.getDocCount(reader, fieldName);
    byte[] min = PointValues.getMinPackedValue(reader, fieldName);
    byte[] max = PointValues.getMaxPackedValue(reader, fieldName);
    return new FieldStats.Long(reader.maxDoc(), docCount, -1L, size,
        isSearchable, isAggregatable,
        IntPoint.decodeDimension(min, 0), IntPoint.decodeDimension(max, 0));
}

@Override
FieldStats.Long stats(IndexReader reader, String fieldName,
                      boolean isSearchable, boolean isAggregatable) throws IOException {
    FieldInfo fi = org.apache.lucene.index.MultiFields.getMergedFieldInfos(reader).fieldInfo(fieldName);
    if (fi == null) {
        return null;
    }
    long size = PointValues.size(reader, fieldName);
    if (size == 0) {
        return new FieldStats.Long(reader.maxDoc(), 0, -1, -1, isSearchable, isAggregatable);
    }
    int docCount = PointValues.getDocCount(reader, fieldName);
    byte[] min = PointValues.getMinPackedValue(reader, fieldName);
    byte[] max = PointValues.getMaxPackedValue(reader, fieldName);
    return new FieldStats.Long(reader.maxDoc(), docCount, -1L, size,
        isSearchable, isAggregatable,
        LongPoint.decodeDimension(min, 0), LongPoint.decodeDimension(max, 0));
}

@Override
public FieldStats.Date stats(IndexReader reader) throws IOException {
    String field = name();
    FieldInfo fi = org.apache.lucene.index.MultiFields.getMergedFieldInfos(reader).fieldInfo(name());
    if (fi == null) {
        return null;
    }
    long size = PointValues.size(reader, field);
    if (size == 0) {
        return new FieldStats.Date(reader.maxDoc(), 0, -1, -1, isSearchable(), isAggregatable());
    }
    int docCount = PointValues.getDocCount(reader, field);
    byte[] min = PointValues.getMinPackedValue(reader, field);
    byte[] max = PointValues.getMaxPackedValue(reader, field);
    return new FieldStats.Date(reader.maxDoc(), docCount, -1L, size,
        isSearchable(), isAggregatable(), dateTimeFormatter(),
        LongPoint.decodeDimension(min, 0), LongPoint.decodeDimension(max, 0));
}

@Override
public FieldStats.GeoPoint stats(IndexReader reader) throws IOException {
    String field = name();
    FieldInfo fi = org.apache.lucene.index.MultiFields.getMergedFieldInfos(reader).fieldInfo(name());
    if (fi == null) {
        return null;
    }
    final long size = PointValues.size(reader, field);
    if (size == 0) {
        return new FieldStats.GeoPoint(reader.maxDoc(), -1L, -1L, -1L, isSearchable(), isAggregatable());
    }
    final int docCount = PointValues.getDocCount(reader, field);
    byte[] min = PointValues.getMinPackedValue(reader, field);
    byte[] max = PointValues.getMaxPackedValue(reader, field);
    GeoPoint minPt = new GeoPoint(GeoEncodingUtils.decodeLatitude(min, 0),
        GeoEncodingUtils.decodeLongitude(min, Integer.BYTES));
    GeoPoint maxPt = new GeoPoint(GeoEncodingUtils.decodeLatitude(max, 0),
        GeoEncodingUtils.decodeLongitude(max, Integer.BYTES));
    return new FieldStats.GeoPoint(reader.maxDoc(), docCount, -1L, size,
        isSearchable(), isAggregatable(), minPt, maxPt);
}

@Override
public FieldStats.Ip stats(IndexReader reader) throws IOException {
    String field = name();
    FieldInfo fi = org.apache.lucene.index.MultiFields.getMergedFieldInfos(reader).fieldInfo(name());
    if (fi == null) {
        return null;
    }
    long size = PointValues.size(reader, field);
    if (size == 0) {
        return new FieldStats.Ip(reader.maxDoc(), 0, -1, -1, isSearchable(), isAggregatable());
    }
    int docCount = PointValues.getDocCount(reader, field);
    byte[] min = PointValues.getMinPackedValue(reader, field);
    byte[] max = PointValues.getMaxPackedValue(reader, field);
    return new FieldStats.Ip(reader.maxDoc(), docCount, -1L, size,
        isSearchable(), isAggregatable(),
        InetAddressPoint.decode(min), InetAddressPoint.decode(max));
}

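// Illustrative aside, not part of the mapper above: InetAddressPoint packs every address into a
// 16-byte value (IPv4 mapped into the IPv6 space), which is why the packed min/max bytes above can
// be handed straight to InetAddressPoint.decode(). A minimal round trip sketch, assuming the same
// Lucene version as these snippets; the class name and sample address are made up for the example.
import java.net.InetAddress;

import org.apache.lucene.document.InetAddressPoint;

public class IpPointRoundTrip {
    public static void main(String[] args) throws Exception {
        InetAddress addr = InetAddress.getByName("192.168.1.1"); // hypothetical sample address
        byte[] packed = InetAddressPoint.encode(addr);           // 16-byte packed representation
        InetAddress roundTripped = InetAddressPoint.decode(packed);
        System.out.println(roundTripped.getHostAddress());       // expected: 192.168.1.1
    }
}
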
@Override
public FieldStats stats(IndexReader reader) throws IOException {
    String fieldName = name();
    long size = PointValues.size(reader, fieldName);
    if (size == 0) {
        return null;
    }
    int docCount = PointValues.getDocCount(reader, fieldName);
    byte[] min = PointValues.getMinPackedValue(reader, fieldName);
    byte[] max = PointValues.getMaxPackedValue(reader, fieldName);
    return new FieldStats.Long(reader.maxDoc(), docCount, -1L, size, true, false,
        LongPoint.decodeDimension(min, 0), LongPoint.decodeDimension(max, 0));
}

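// Illustrative aside, not part of the mappers above: every stats(...) method in this file follows
// the same Lucene pattern -- check PointValues.size(), then decode the packed min/max values.
// A minimal sketch of that shared pattern for a one-dimensional LongPoint field; "LongPointStats"
// and "minMax" are hypothetical names introduced here, and the calls assume the same Lucene 6.x-era
// static PointValues helpers used by the snippets above.
import java.io.IOException;

import org.apache.lucene.document.LongPoint;
import org.apache.lucene.index.IndexReader;
import org.apache.lucene.index.PointValues;

final class LongPointStats {

    /** Returns {min, max} for a one-dimensional LongPoint field, or null if the field has no points. */
    static long[] minMax(IndexReader reader, String field) throws IOException {
        long size = PointValues.size(reader, field);      // total number of indexed point values
        if (size == 0) {
            return null;                                  // mirrors the size == 0 early returns above
        }
        byte[] min = PointValues.getMinPackedValue(reader, field);
        byte[] max = PointValues.getMaxPackedValue(reader, field);
        return new long[] {
            LongPoint.decodeDimension(min, 0),            // first dimension starts at byte offset 0
            LongPoint.decodeDimension(max, 0)
        };
    }

    private LongPointStats() {}
}
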
@Override
public Relation isFieldWithinQuery(IndexReader reader,
                                   Object from, Object to, boolean includeLower, boolean includeUpper,
                                   DateTimeZone timeZone, DateMathParser dateParser,
                                   QueryRewriteContext context) throws IOException {
    if (dateParser == null) {
        dateParser = this.dateMathParser;
    }

    long fromInclusive = Long.MIN_VALUE;
    if (from != null) {
        fromInclusive = parseToMilliseconds(from, !includeLower, timeZone, dateParser, context);
        if (includeLower == false) {
            if (fromInclusive == Long.MAX_VALUE) {
                return Relation.DISJOINT;
            }
            ++fromInclusive;
        }
    }

    long toInclusive = Long.MAX_VALUE;
    if (to != null) {
        toInclusive = parseToMilliseconds(to, includeUpper, timeZone, dateParser, context);
        if (includeUpper == false) {
            if (toInclusive == Long.MIN_VALUE) {
                return Relation.DISJOINT;
            }
            --toInclusive;
        }
    }

    // This check needs to be done after fromInclusive and toInclusive
    // are resolved so we can throw an exception if they are invalid
    // even if there are no points in the shard
    if (PointValues.size(reader, name()) == 0) {
        // no points, so nothing matches
        return Relation.DISJOINT;
    }

    long minValue = LongPoint.decodeDimension(PointValues.getMinPackedValue(reader, name()), 0);
    long maxValue = LongPoint.decodeDimension(PointValues.getMaxPackedValue(reader, name()), 0);

    if (minValue >= fromInclusive && maxValue <= toInclusive) {
        return Relation.WITHIN;
    } else if (maxValue < fromInclusive || minValue > toInclusive) {
        return Relation.DISJOINT;
    } else {
        return Relation.INTERSECTS;
    }
}

@Override
public PointValues getPointValues(String field) throws IOException {
    // Delegate only when isMeta(field) allows it; point values for all other fields are hidden.
    return isMeta(field) ? in.getPointValues(field) : null;
}