@Override
public List<Field> createFields(String name, Number value, boolean indexed, boolean docValued, boolean stored) {
    List<Field> fields = new ArrayList<>();
    if (indexed) {
        fields.add(new FloatPoint(name, value.floatValue()));
    }
    if (docValued) {
        fields.add(new SortedNumericDocValuesField(name, NumericUtils.floatToSortableInt(value.floatValue())));
    }
    if (stored) {
        fields.add(new StoredField(name, value.floatValue()));
    }
    return fields;
}
@Override
public List<Field> createFields(String name, Number value, boolean indexed, boolean docValued, boolean stored) {
    List<Field> fields = new ArrayList<>();
    if (indexed) {
        fields.add(new DoublePoint(name, value.doubleValue()));
    }
    if (docValued) {
        fields.add(new SortedNumericDocValuesField(name, NumericUtils.doubleToSortableLong(value.doubleValue())));
    }
    if (stored) {
        fields.add(new StoredField(name, value.doubleValue()));
    }
    return fields;
}
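// Hedged illustration, not from the original sources: the createFields variants above rely on
// NumericUtils.floatToSortableInt/doubleToSortableLong being order-preserving bijections, so an
// encoded doc value can always be decoded back to the original number. The field name "weight"
// and this helper method are hypothetical.
private static void sortableEncodingRoundTrip() {
    double value = 0.25d;
    long encoded = NumericUtils.doubleToSortableLong(value);
    Field dv = new SortedNumericDocValuesField("weight", encoded); // as stored by createFields
    assert NumericUtils.sortableLongToDouble(encoded) == value;
    assert dv.numericValue().longValue() == encoded;
}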
@Override
protected void doXContentBody(XContentBuilder builder, boolean includeDefaults, Params params) throws IOException {
    super.doXContentBody(builder, includeDefaults, params);
    if (includeDefaults || pathType != Defaults.PATH_TYPE) {
        builder.field("path", pathType.name().toLowerCase(Locale.ROOT));
    }
    if (includeDefaults || fieldType().isLatLonEnabled() != GeoPointFieldMapper.Defaults.ENABLE_LATLON) {
        builder.field("lat_lon", fieldType().isLatLonEnabled());
    }
    if (fieldType().isLatLonEnabled()
            && (includeDefaults || fieldType().latFieldType().numericPrecisionStep() != NumericUtils.PRECISION_STEP_DEFAULT)) {
        builder.field("precision_step", fieldType().latFieldType().numericPrecisionStep());
    }
    if (includeDefaults || fieldType().isGeoHashEnabled() != Defaults.ENABLE_GEOHASH) {
        builder.field("geohash", fieldType().isGeoHashEnabled());
    }
    if (includeDefaults || fieldType().isGeoHashPrefixEnabled() != Defaults.ENABLE_GEOHASH_PREFIX) {
        builder.field("geohash_prefix", fieldType().isGeoHashPrefixEnabled());
    }
    if (fieldType().isGeoHashEnabled()
            && (includeDefaults || fieldType().geoHashPrecision() != Defaults.GEO_HASH_PRECISION)) {
        builder.field("geohash_precision", fieldType().geoHashPrecision());
    }
    if (includeDefaults || ignoreMalformed.explicit()) {
        builder.field(Names.IGNORE_MALFORMED, ignoreMalformed.value());
    }
}
public String normalizeFieldValue(String field, Object value) {
    if (NumberUtils.isNumber(value.toString())) {
        Number n = NumberUtils.createNumber(value.toString());
        if (n instanceof Integer) {
            return NumericUtils.intToPrefixCoded((Integer) n);
        } else if (n instanceof Long) {
            return NumericUtils.longToPrefixCoded((Long) n);
        } else if (n instanceof Float) {
            return NumericUtils.floatToPrefixCoded((Float) n);
        } else if (n instanceof Double) {
            return NumericUtils.doubleToPrefixCoded((Double) n);
        } else {
            throw new IllegalArgumentException("Unhandled numeric type: " + n.getClass());
        }
    } else {
        throw new IllegalArgumentException("Value is not a number: " + value);
    }
}
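// Hedged sketch, assuming the legacy String-based NumericUtils API used in normalizeFieldValue
// above (Lucene 2.9/3.x era): prefix coding is reversible, so a normalized term can be decoded
// back to the number it came from. This helper is hypothetical.
private static void prefixCodedRoundTrip() {
    assert NumericUtils.prefixCodedToInt(NumericUtils.intToPrefixCoded(42)) == 42;
    assert NumericUtils.prefixCodedToLong(NumericUtils.longToPrefixCoded(42L)) == 42L;
}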
public void testIntFieldMinMax() throws Exception {
    Directory dir = newDirectory();
    RandomIndexWriter w = new RandomIndexWriter(random(), dir);
    int numDocs = atLeast(100);
    int minValue = Integer.MAX_VALUE;
    int maxValue = Integer.MIN_VALUE;
    for (int i = 0; i < numDocs; i++) {
        Document doc = new Document();
        int num = random().nextInt();
        minValue = Math.min(num, minValue);
        maxValue = Math.max(num, maxValue);
        doc.add(new IntField("field", num, Field.Store.NO));
        w.addDocument(doc);
    }
    IndexReader r = w.getReader();
    Terms terms = MultiFields.getTerms(r, "field");
    assertEquals(minValue, NumericUtils.getMinInt(terms));
    assertEquals(maxValue, NumericUtils.getMaxInt(terms));
    r.close();
    w.close();
    dir.close();
}
public void testLongFieldMinMax() throws Exception {
    Directory dir = newDirectory();
    RandomIndexWriter w = new RandomIndexWriter(random(), dir);
    int numDocs = atLeast(100);
    long minValue = Long.MAX_VALUE;
    long maxValue = Long.MIN_VALUE;
    for (int i = 0; i < numDocs; i++) {
        Document doc = new Document();
        long num = random().nextLong();
        minValue = Math.min(num, minValue);
        maxValue = Math.max(num, maxValue);
        doc.add(new LongField("field", num, Field.Store.NO));
        w.addDocument(doc);
    }
    IndexReader r = w.getReader();
    Terms terms = MultiFields.getTerms(r, "field");
    assertEquals(minValue, NumericUtils.getMinLong(terms));
    assertEquals(maxValue, NumericUtils.getMaxLong(terms));
    r.close();
    w.close();
    dir.close();
}
public void testFloatFieldMinMax() throws Exception {
    Directory dir = newDirectory();
    RandomIndexWriter w = new RandomIndexWriter(random(), dir);
    int numDocs = atLeast(100);
    float minValue = Float.POSITIVE_INFINITY;
    float maxValue = Float.NEGATIVE_INFINITY;
    for (int i = 0; i < numDocs; i++) {
        Document doc = new Document();
        float num = random().nextFloat();
        minValue = Math.min(num, minValue);
        maxValue = Math.max(num, maxValue);
        doc.add(new FloatField("field", num, Field.Store.NO));
        w.addDocument(doc);
    }
    IndexReader r = w.getReader();
    Terms terms = MultiFields.getTerms(r, "field");
    assertEquals(minValue, NumericUtils.sortableIntToFloat(NumericUtils.getMinInt(terms)), 0.0f);
    assertEquals(maxValue, NumericUtils.sortableIntToFloat(NumericUtils.getMaxInt(terms)), 0.0f);
    r.close();
    w.close();
    dir.close();
}
public void testDoubleFieldMinMax() throws Exception {
    Directory dir = newDirectory();
    RandomIndexWriter w = new RandomIndexWriter(random(), dir);
    int numDocs = atLeast(100);
    double minValue = Double.POSITIVE_INFINITY;
    double maxValue = Double.NEGATIVE_INFINITY;
    for (int i = 0; i < numDocs; i++) {
        Document doc = new Document();
        double num = random().nextDouble();
        minValue = Math.min(num, minValue);
        maxValue = Math.max(num, maxValue);
        doc.add(new DoubleField("field", num, Field.Store.NO));
        w.addDocument(doc);
    }
    IndexReader r = w.getReader();
    Terms terms = MultiFields.getTerms(r, "field");
    assertEquals(minValue, NumericUtils.sortableLongToDouble(NumericUtils.getMinLong(terms)), 0.0);
    assertEquals(maxValue, NumericUtils.sortableLongToDouble(NumericUtils.getMaxLong(terms)), 0.0);
    r.close();
    w.close();
    dir.close();
}
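// Hedged illustration, not part of the original tests: the float/double min/max tests above work
// because the sortable encodings preserve ordering, so the smallest and largest encoded terms
// decode to the smallest and largest original values. This helper is hypothetical.
private static void sortableOrderIsPreserved() {
    assert NumericUtils.floatToSortableInt(-1.5f) < NumericUtils.floatToSortableInt(2.5f);
    assert NumericUtils.doubleToSortableLong(-0.25d) < NumericUtils.doubleToSortableLong(0.0d);
}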
public void testLongStream() throws Exception {
    final NumericTokenStream stream = new NumericTokenStream().setLongValue(lvalue);
    final TermToBytesRefAttribute bytesAtt = stream.getAttribute(TermToBytesRefAttribute.class);
    assertNotNull(bytesAtt);
    final TypeAttribute typeAtt = stream.getAttribute(TypeAttribute.class);
    assertNotNull(typeAtt);
    final NumericTokenStream.NumericTermAttribute numericAtt = stream.getAttribute(NumericTokenStream.NumericTermAttribute.class);
    assertNotNull(numericAtt);
    final BytesRef bytes = bytesAtt.getBytesRef();
    stream.reset();
    assertEquals(64, numericAtt.getValueSize());
    for (int shift = 0; shift < 64; shift += NumericUtils.PRECISION_STEP_DEFAULT) {
        assertTrue("New token is available", stream.incrementToken());
        assertEquals("Shift value wrong", shift, numericAtt.getShift());
        bytesAtt.fillBytesRef();
        assertEquals("Term is incorrectly encoded", lvalue & ~((1L << shift) - 1L), NumericUtils.prefixCodedToLong(bytes));
        assertEquals("Term raw value is incorrectly encoded", lvalue & ~((1L << shift) - 1L), numericAtt.getRawValue());
        assertEquals("Type incorrect", (shift == 0) ? NumericTokenStream.TOKEN_TYPE_FULL_PREC : NumericTokenStream.TOKEN_TYPE_LOWER_PREC, typeAtt.type());
    }
    assertFalse("More tokens available", stream.incrementToken());
    stream.end();
    stream.close();
}
public void testIntStream() throws Exception {
    final NumericTokenStream stream = new NumericTokenStream().setIntValue(ivalue);
    final TermToBytesRefAttribute bytesAtt = stream.getAttribute(TermToBytesRefAttribute.class);
    assertNotNull(bytesAtt);
    final TypeAttribute typeAtt = stream.getAttribute(TypeAttribute.class);
    assertNotNull(typeAtt);
    final NumericTokenStream.NumericTermAttribute numericAtt = stream.getAttribute(NumericTokenStream.NumericTermAttribute.class);
    assertNotNull(numericAtt);
    final BytesRef bytes = bytesAtt.getBytesRef();
    stream.reset();
    assertEquals(32, numericAtt.getValueSize());
    for (int shift = 0; shift < 32; shift += NumericUtils.PRECISION_STEP_DEFAULT) {
        assertTrue("New token is available", stream.incrementToken());
        assertEquals("Shift value wrong", shift, numericAtt.getShift());
        bytesAtt.fillBytesRef();
        assertEquals("Term is incorrectly encoded", ivalue & ~((1 << shift) - 1), NumericUtils.prefixCodedToInt(bytes));
        assertEquals("Term raw value is incorrectly encoded", ((long) ivalue) & ~((1L << shift) - 1L), numericAtt.getRawValue());
        assertEquals("Type incorrect", (shift == 0) ? NumericTokenStream.TOKEN_TYPE_FULL_PREC : NumericTokenStream.TOKEN_TYPE_LOWER_PREC, typeAtt.type());
    }
    assertFalse("More tokens available", stream.incrementToken());
    stream.end();
    stream.close();
}
@Override
public String indexedToReadable(String _indexedForm) {
    final BytesRef indexedForm = new BytesRef(_indexedForm);
    switch (type) {
        case INTEGER:
            return Integer.toString(NumericUtils.prefixCodedToInt(indexedForm));
        case FLOAT:
            return Float.toString(NumericUtils.sortableIntToFloat(NumericUtils.prefixCodedToInt(indexedForm)));
        case LONG:
            return Long.toString(NumericUtils.prefixCodedToLong(indexedForm));
        case DOUBLE:
            return Double.toString(NumericUtils.sortableLongToDouble(NumericUtils.prefixCodedToLong(indexedForm)));
        case DATE:
            return dateField.toExternal(new Date(NumericUtils.prefixCodedToLong(indexedForm)));
        default:
            throw new SolrException(SolrException.ErrorCode.SERVER_ERROR, "Unknown type for trie field: " + type);
    }
}
@Override
public Object toObject(SchemaField sf, BytesRef term) {
    switch (type) {
        case INTEGER:
            return NumericUtils.prefixCodedToInt(term);
        case FLOAT:
            return NumericUtils.sortableIntToFloat(NumericUtils.prefixCodedToInt(term));
        case LONG:
            return NumericUtils.prefixCodedToLong(term);
        case DOUBLE:
            return NumericUtils.sortableLongToDouble(NumericUtils.prefixCodedToLong(term));
        case DATE:
            return new Date(NumericUtils.prefixCodedToLong(term));
        default:
            throw new SolrException(SolrException.ErrorCode.SERVER_ERROR, "Unknown type for trie field: " + type);
    }
}
/**
 * Get the document for the user group.
 *
 * @param userGroup the user group to create the document from
 * @return the created document
 */
private Document getDocument(IrUserGroup userGroup) {
    Document doc = new Document();
    doc.add(new Field(ID, NumericUtils.longToPrefixCoded(userGroup.getId()), Field.Store.YES, Field.Index.NOT_ANALYZED));
    String name = userGroup.getName();
    doc.add(new Field(NAME, name, Field.Store.YES, Field.Index.ANALYZED));
    if (userGroup.getDescription() != null && !userGroup.getDescription().equals("")) {
        doc.add(new Field(DESCRIPTION, userGroup.getDescription(), Field.Store.YES, Field.Index.ANALYZED));
    }
    return doc;
}
/**
 * Get the document for the institutional collection.
 *
 * @param collection the institutional collection to create the document from
 * @return the created document
 */
private Document getDocument(InstitutionalCollection collection) {
    Document doc = new Document();
    doc.add(new Field(ID, NumericUtils.longToPrefixCoded(collection.getId()), Field.Store.YES, Field.Index.NOT_ANALYZED));
    String name = collection.getName();
    doc.add(new Field(NAME, name, Field.Store.YES, Field.Index.ANALYZED));
    if (collection.getDescription() != null && !collection.getDescription().equals("")) {
        doc.add(new Field(DESCRIPTION, collection.getDescription(), Field.Store.YES, Field.Index.ANALYZED));
    }
    return doc;
}
/**
 * Set up the filters for collections - this is for searching within collections.
 *
 * @param collection the collection to search within
 * @return the created filters
 * @throws ParseException
 */
private List<Filter> getCollectionFilters(InstitutionalCollection collection) throws ParseException {
    List<Filter> filters = new LinkedList<Filter>();
    // isolate the collection root
    Term t = new Term("collection_root_id", NumericUtils.longToPrefixCoded(collection.getTreeRoot().getId()));
    Query subQuery = new TermQuery(t);
    filters.add(new QueryWrapperFilter(subQuery));
    // isolate the range of children
    subQuery = NumericRangeQuery.newLongRange("collection_left_value", collection.getLeftValue(), collection.getRightValue(), true, true);
    filters.add(new QueryWrapperFilter(subQuery));
    return filters;
}
/**
 * Makes the value to query.
 * <br/>The value to query is derived from NumericUtils.longToPrefixCoded(timestamp.getTime()).
 *
 * @param value the input query value
 * @param isLowerBoundary true if this is a lower boundary of a range query
 * @param isUpperBoundary true if this is an upper boundary of a range query
 * @return the value to query
 * @throws DiscoveryException if the supplied value cannot be converted
 */
@Override
protected String makeValueToQuery(String value, boolean isLowerBoundary, boolean isUpperBoundary) throws DiscoveryException {
    try {
        PropertyValueType valueType = PropertyValueType.TIMESTAMP;
        Timestamp tsValue = (Timestamp) valueType.evaluate(value, isLowerBoundary, isUpperBoundary);
        if (tsValue == null) {
            return null;
        }
        if (isLowerBoundary) {
            LOGGER.finer("Lower boundary timestamp to query: " + tsValue);
        } else if (isUpperBoundary) {
            LOGGER.finer("Upper boundary timestamp to query: " + tsValue);
        } else {
            LOGGER.finer("Timestamp to query: " + tsValue);
        }
        return NumericUtils.longToPrefixCoded(tsValue.getTime());
    } catch (IllegalArgumentException e) {
        throw new DiscoveryException("Invalid date: " + value);
    }
}
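// Hedged illustration, assuming the legacy String-returning NumericUtils API used in
// makeValueToQuery above: prefix-coded terms at the same shift compare lexicographically in
// numeric order, which is what makes the encoded timestamps usable as range-query boundaries.
// This helper is hypothetical.
private static void prefixCodedTermsSortNumerically() {
    String earlier = NumericUtils.longToPrefixCoded(1000L);
    String later = NumericUtils.longToPrefixCoded(2000L);
    assert earlier.compareTo(later) < 0;
}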
public void testLongStream() throws Exception {
    final NumericTokenStream stream = new NumericTokenStream().setLongValue(lvalue);
    // use getAttribute to test if attributes really exist; if not, an IAE will be thrown
    final TermToBytesRefAttribute bytesAtt = stream.getAttribute(TermToBytesRefAttribute.class);
    final TypeAttribute typeAtt = stream.getAttribute(TypeAttribute.class);
    final NumericTokenStream.NumericTermAttribute numericAtt = stream.getAttribute(NumericTokenStream.NumericTermAttribute.class);
    final BytesRef bytes = bytesAtt.getBytesRef();
    stream.reset();
    assertEquals(64, numericAtt.getValueSize());
    for (int shift = 0; shift < 64; shift += NumericUtils.PRECISION_STEP_DEFAULT) {
        assertTrue("New token is available", stream.incrementToken());
        assertEquals("Shift value wrong", shift, numericAtt.getShift());
        final int hash = bytesAtt.fillBytesRef();
        assertEquals("Hash incorrect", bytes.hashCode(), hash);
        assertEquals("Term is incorrectly encoded", lvalue & ~((1L << shift) - 1L), NumericUtils.prefixCodedToLong(bytes));
        assertEquals("Term raw value is incorrectly encoded", lvalue & ~((1L << shift) - 1L), numericAtt.getRawValue());
        assertEquals("Type incorrect", (shift == 0) ? NumericTokenStream.TOKEN_TYPE_FULL_PREC : NumericTokenStream.TOKEN_TYPE_LOWER_PREC, typeAtt.type());
    }
    assertFalse("More tokens available", stream.incrementToken());
    stream.end();
    stream.close();
}
public void testIntStream() throws Exception {
    final NumericTokenStream stream = new NumericTokenStream().setIntValue(ivalue);
    // use getAttribute to test if attributes really exist; if not, an IAE will be thrown
    final TermToBytesRefAttribute bytesAtt = stream.getAttribute(TermToBytesRefAttribute.class);
    final TypeAttribute typeAtt = stream.getAttribute(TypeAttribute.class);
    final NumericTokenStream.NumericTermAttribute numericAtt = stream.getAttribute(NumericTokenStream.NumericTermAttribute.class);
    final BytesRef bytes = bytesAtt.getBytesRef();
    stream.reset();
    assertEquals(32, numericAtt.getValueSize());
    for (int shift = 0; shift < 32; shift += NumericUtils.PRECISION_STEP_DEFAULT) {
        assertTrue("New token is available", stream.incrementToken());
        assertEquals("Shift value wrong", shift, numericAtt.getShift());
        final int hash = bytesAtt.fillBytesRef();
        assertEquals("Hash incorrect", bytes.hashCode(), hash);
        assertEquals("Term is incorrectly encoded", ivalue & ~((1 << shift) - 1), NumericUtils.prefixCodedToInt(bytes));
        assertEquals("Term raw value is incorrectly encoded", ((long) ivalue) & ~((1L << shift) - 1L), numericAtt.getRawValue());
        assertEquals("Type incorrect", (shift == 0) ? NumericTokenStream.TOKEN_TYPE_FULL_PREC : NumericTokenStream.TOKEN_TYPE_LOWER_PREC, typeAtt.type());
    }
    assertFalse("More tokens available", stream.incrementToken());
    stream.end();
    stream.close();
}
@Override
public void readableToIndexed(CharSequence val, BytesRef result) {
    String s = val.toString();
    switch (type) {
        case INTEGER:
            NumericUtils.intToPrefixCodedBytes(Integer.parseInt(s), 0, result);
            break;
        case FLOAT:
            NumericUtils.intToPrefixCodedBytes(NumericUtils.floatToSortableInt(Float.parseFloat(s)), 0, result);
            break;
        case LONG:
            NumericUtils.longToPrefixCodedBytes(Long.parseLong(s), 0, result);
            break;
        case DOUBLE:
            NumericUtils.longToPrefixCodedBytes(NumericUtils.doubleToSortableLong(Double.parseDouble(s)), 0, result);
            break;
        case DATE:
            NumericUtils.longToPrefixCodedBytes(dateField.parseMath(null, s).getTime(), 0, result);
            break;
        default:
            throw new SolrException(SolrException.ErrorCode.SERVER_ERROR, "Unknown type for trie field: " + type);
    }
}
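// Hedged round-trip sketch, assuming the Lucene 4.x BytesRef-based API used in readableToIndexed
// above: encoding a readable value at shift 0 and decoding it again should recover the value.
// This helper is hypothetical.
private static void readableToIndexedRoundTrip() {
    BytesRef bytes = new BytesRef(NumericUtils.BUF_SIZE_INT);
    NumericUtils.intToPrefixCodedBytes(42, 0, bytes);
    assert NumericUtils.prefixCodedToInt(bytes) == 42;
}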
@Override
public byte[] getBytes(Range r) {
    byte[] b = new byte[Float.BYTES * 2];
    NumericUtils.intToSortableBytes(NumericUtils.floatToSortableInt(r.from.floatValue()), b, 0);
    NumericUtils.intToSortableBytes(NumericUtils.floatToSortableInt(r.to.floatValue()), b, Float.BYTES);
    return b;
}
@Override
public byte[] getBytes(Range r) {
    byte[] b = new byte[Double.BYTES * 2];
    NumericUtils.longToSortableBytes(NumericUtils.doubleToSortableLong(r.from.doubleValue()), b, 0);
    NumericUtils.longToSortableBytes(NumericUtils.doubleToSortableLong(r.to.doubleValue()), b, Double.BYTES);
    return b;
}
@Override
public byte[] getBytes(Range r) {
    byte[] b = new byte[Integer.BYTES * 2];
    NumericUtils.intToSortableBytes(r.from.intValue(), b, 0);
    NumericUtils.intToSortableBytes(r.to.intValue(), b, Integer.BYTES);
    return b;
}
@Override
public byte[] getBytes(Range r) {
    byte[] b = new byte[Long.BYTES * 2];
    long from = r.from == null ? Long.MIN_VALUE : r.from.longValue();
    long to = r.to == null ? Long.MAX_VALUE : r.to.longValue();
    NumericUtils.longToSortableBytes(from, b, 0);
    NumericUtils.longToSortableBytes(to, b, Long.BYTES);
    return b;
}
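// Hedged decode counterpart, not from the original sources: the fixed-width sortable layout
// written by the getBytes methods above can be read back with the matching NumericUtils calls.
// This helper is hypothetical.
private static double[] decodeDoubleRange(byte[] b) {
    double from = NumericUtils.sortableLongToDouble(NumericUtils.sortableBytesToLong(b, 0));
    double to = NumericUtils.sortableLongToDouble(NumericUtils.sortableBytesToLong(b, Double.BYTES));
    return new double[] {from, to};
}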
@Override
Query rangeQuery(String field, Object lowerTerm, Object upperTerm, boolean includeLower, boolean includeUpper, boolean hasDocValues) {
    float l = Float.NEGATIVE_INFINITY;
    float u = Float.POSITIVE_INFINITY;
    if (lowerTerm != null) {
        l = parse(lowerTerm, false);
        if (includeLower == false) {
            l = FloatPoint.nextUp(l);
        }
    }
    if (upperTerm != null) {
        u = parse(upperTerm, false);
        if (includeUpper == false) {
            u = FloatPoint.nextDown(u);
        }
    }
    Query query = FloatPoint.newRangeQuery(field, l, u);
    if (hasDocValues) {
        Query dvQuery = SortedNumericDocValuesField.newRangeQuery(field,
            NumericUtils.floatToSortableInt(l), NumericUtils.floatToSortableInt(u));
        query = new IndexOrDocValuesQuery(query, dvQuery);
    }
    return query;
}
@Override
Query rangeQuery(String field, Object lowerTerm, Object upperTerm, boolean includeLower, boolean includeUpper, boolean hasDocValues) {
    double l = Double.NEGATIVE_INFINITY;
    double u = Double.POSITIVE_INFINITY;
    if (lowerTerm != null) {
        l = parse(lowerTerm, false);
        if (includeLower == false) {
            l = DoublePoint.nextUp(l);
        }
    }
    if (upperTerm != null) {
        u = parse(upperTerm, false);
        if (includeUpper == false) {
            u = DoublePoint.nextDown(u);
        }
    }
    Query query = DoublePoint.newRangeQuery(field, l, u);
    if (hasDocValues) {
        Query dvQuery = SortedNumericDocValuesField.newRangeQuery(field,
            NumericUtils.doubleToSortableLong(l), NumericUtils.doubleToSortableLong(u));
        query = new IndexOrDocValuesQuery(query, dvQuery);
    }
    return query;
}
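// Hedged usage sketch with a made-up field name "price": pairing the point query with the
// doc-values query, as the rangeQuery methods above do, lets IndexOrDocValuesQuery pick
// whichever execution strategy is cheaper per segment. This helper is hypothetical.
private static Query priceBetween(double low, double high) {
    Query points = DoublePoint.newRangeQuery("price", low, high);
    Query docValues = SortedNumericDocValuesField.newRangeQuery("price",
        NumericUtils.doubleToSortableLong(low), NumericUtils.doubleToSortableLong(high));
    return new IndexOrDocValuesQuery(points, docValues);
}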
public void testRandomDoubles() throws IOException {
    MappedFieldType ft = new NumberFieldMapper.NumberFieldType(NumberFieldMapper.NumberType.DOUBLE);
    ft.setName("field");
    final ExtendedSimpleStatsAggregator expected = new ExtendedSimpleStatsAggregator();
    testCase(ft, iw -> {
        int numDocs = randomIntBetween(10, 50);
        for (int i = 0; i < numDocs; i++) {
            Document doc = new Document();
            int numValues = randomIntBetween(1, 5);
            for (int j = 0; j < numValues; j++) {
                double value = randomDoubleBetween(-100d, 100d, true);
                long valueAsLong = NumericUtils.doubleToSortableLong(value);
                doc.add(new SortedNumericDocValuesField("field", valueAsLong));
                expected.add(value);
            }
            iw.addDocument(doc);
        }
    }, stats -> {
        assertEquals(expected.count, stats.getCount(), 0);
        assertEquals(expected.sum, stats.getSum(), TOLERANCE);
        assertEquals(expected.min, stats.getMin(), 0);
        assertEquals(expected.max, stats.getMax(), 0);
        assertEquals(expected.sum / expected.count, stats.getAvg(), TOLERANCE);
        assertEquals(expected.sumOfSqrs, stats.getSumOfSquares(), TOLERANCE);
        assertEquals(expected.stdDev(), stats.getStdDeviation(), TOLERANCE);
        assertEquals(expected.variance(), stats.getVariance(), TOLERANCE);
        assertEquals(expected.stdDevBound(ExtendedStats.Bounds.LOWER, stats.getSigma()),
            stats.getStdDeviationBound(ExtendedStats.Bounds.LOWER), TOLERANCE);
        assertEquals(expected.stdDevBound(ExtendedStats.Bounds.UPPER, stats.getSigma()),
            stats.getStdDeviationBound(ExtendedStats.Bounds.UPPER), TOLERANCE);
    });
}
public void testSimple() throws IOException {
    try (Directory dir = newDirectory();
         RandomIndexWriter w = new RandomIndexWriter(random(), dir)) {
        for (double value : new double[] {3, 0.2, 10}) {
            Document doc = new Document();
            doc.add(new SortedNumericDocValuesField("field", NumericUtils.doubleToSortableLong(value)));
            w.addDocument(doc);
        }
        PercentileRanksAggregationBuilder aggBuilder = new PercentileRanksAggregationBuilder("my_agg")
            .field("field")
            .method(PercentilesMethod.HDR)
            .values(0.1, 0.5, 12);
        MappedFieldType fieldType = new NumberFieldMapper.NumberFieldType(NumberFieldMapper.NumberType.DOUBLE);
        fieldType.setName("field");
        try (IndexReader reader = w.getReader()) {
            IndexSearcher searcher = new IndexSearcher(reader);
            PercentileRanks ranks = search(searcher, new MatchAllDocsQuery(), aggBuilder, fieldType);
            Iterator<Percentile> rankIterator = ranks.iterator();
            Percentile rank = rankIterator.next();
            assertEquals(0.1, rank.getValue(), 0d);
            assertThat(rank.getPercent(), Matchers.equalTo(0d));
            rank = rankIterator.next();
            assertEquals(0.5, rank.getValue(), 0d);
            assertThat(rank.getPercent(), Matchers.greaterThan(0d));
            assertThat(rank.getPercent(), Matchers.lessThan(100d));
            rank = rankIterator.next();
            assertEquals(12, rank.getValue(), 0d);
            assertThat(rank.getPercent(), Matchers.equalTo(100d));
            assertFalse(rankIterator.hasNext());
        }
    }
}
public void testSimple() throws IOException {
    try (Directory dir = newDirectory();
         RandomIndexWriter w = new RandomIndexWriter(random(), dir)) {
        for (double value : new double[] {3, 0.2, 10}) {
            Document doc = new Document();
            doc.add(new SortedNumericDocValuesField("field", NumericUtils.doubleToSortableLong(value)));
            w.addDocument(doc);
        }
        PercentileRanksAggregationBuilder aggBuilder = new PercentileRanksAggregationBuilder("my_agg")
            .field("field")
            .method(PercentilesMethod.TDIGEST)
            .values(0.1, 0.5, 12);
        MappedFieldType fieldType = new NumberFieldMapper.NumberFieldType(NumberFieldMapper.NumberType.DOUBLE);
        fieldType.setName("field");
        try (IndexReader reader = w.getReader()) {
            IndexSearcher searcher = new IndexSearcher(reader);
            PercentileRanks ranks = search(searcher, new MatchAllDocsQuery(), aggBuilder, fieldType);
            Iterator<Percentile> rankIterator = ranks.iterator();
            Percentile rank = rankIterator.next();
            assertEquals(0.1, rank.getValue(), 0d);
            // TODO: Fix T-Digest: this assertion should pass but we currently get ~15
            // https://github.com/elastic/elasticsearch/issues/14851
            // assertThat(rank.getPercent(), Matchers.equalTo(0d));
            rank = rankIterator.next();
            assertEquals(0.5, rank.getValue(), 0d);
            assertThat(rank.getPercent(), Matchers.greaterThan(0d));
            assertThat(rank.getPercent(), Matchers.lessThan(100d));
            rank = rankIterator.next();
            assertEquals(12, rank.getValue(), 0d);
            // TODO: Fix T-Digest: this assertion should pass but we currently get ~59
            // https://github.com/elastic/elasticsearch/issues/14851
            // assertThat(rank.getPercent(), Matchers.equalTo(100d));
            assertFalse(rankIterator.hasNext());
        }
    }
}
public void testRandomDoubles() throws IOException {
    MappedFieldType ft = new NumberFieldMapper.NumberFieldType(NumberFieldMapper.NumberType.DOUBLE);
    ft.setName("field");
    final SimpleStatsAggregator expected = new SimpleStatsAggregator();
    testCase(ft, iw -> {
        int numDocs = randomIntBetween(10, 50);
        for (int i = 0; i < numDocs; i++) {
            Document doc = new Document();
            int numValues = randomIntBetween(1, 5);
            for (int j = 0; j < numValues; j++) {
                double value = randomDoubleBetween(-100d, 100d, true);
                long valueAsLong = NumericUtils.doubleToSortableLong(value);
                doc.add(new SortedNumericDocValuesField("field", valueAsLong));
                expected.add(value);
            }
            iw.addDocument(doc);
        }
    }, stats -> {
        assertEquals(expected.count, stats.getCount(), 0);
        assertEquals(expected.sum, stats.getSum(), TOLERANCE);
        assertEquals(expected.min, stats.getMin(), 0);
        assertEquals(expected.max, stats.getMax(), 0);
        assertEquals(expected.sum / expected.count, stats.getAvg(), TOLERANCE);
    });
}
public void testDoubles() throws Exception {
    try (Directory dir = newDirectory();
         RandomIndexWriter w = new RandomIndexWriter(random(), dir)) {
        for (double value : new double[] {9.3, 3.2, -10, -6.5, 5.3, 50.1}) {
            Document doc = new Document();
            doc.add(new SortedNumericDocValuesField("field", NumericUtils.doubleToSortableLong(value)));
            w.addDocument(doc);
        }
        HistogramAggregationBuilder aggBuilder = new HistogramAggregationBuilder("my_agg")
            .field("field")
            .interval(5);
        MappedFieldType fieldType = new NumberFieldMapper.NumberFieldType(NumberFieldMapper.NumberType.DOUBLE);
        fieldType.setName("field");
        try (IndexReader reader = w.getReader()) {
            IndexSearcher searcher = new IndexSearcher(reader);
            Histogram histogram = search(searcher, new MatchAllDocsQuery(), aggBuilder, fieldType);
            assertEquals(4, histogram.getBuckets().size());
            assertEquals(-10d, histogram.getBuckets().get(0).getKey());
            assertEquals(2, histogram.getBuckets().get(0).getDocCount());
            assertEquals(0d, histogram.getBuckets().get(1).getKey());
            assertEquals(1, histogram.getBuckets().get(1).getDocCount());
            assertEquals(5d, histogram.getBuckets().get(2).getKey());
            assertEquals(2, histogram.getBuckets().get(2).getDocCount());
            assertEquals(50d, histogram.getBuckets().get(3).getKey());
            assertEquals(1, histogram.getBuckets().get(3).getDocCount());
        }
    }
}
public void testOffset() throws Exception {
    try (Directory dir = newDirectory();
         RandomIndexWriter w = new RandomIndexWriter(random(), dir)) {
        for (double value : new double[] {9.3, 3.2, -5, -6.5, 5.3}) {
            Document doc = new Document();
            doc.add(new SortedNumericDocValuesField("field", NumericUtils.doubleToSortableLong(value)));
            w.addDocument(doc);
        }
        HistogramAggregationBuilder aggBuilder = new HistogramAggregationBuilder("my_agg")
            .field("field")
            .interval(5)
            .offset(Math.PI);
        MappedFieldType fieldType = new NumberFieldMapper.NumberFieldType(NumberFieldMapper.NumberType.DOUBLE);
        fieldType.setName("field");
        try (IndexReader reader = w.getReader()) {
            IndexSearcher searcher = new IndexSearcher(reader);
            Histogram histogram = search(searcher, new MatchAllDocsQuery(), aggBuilder, fieldType);
            assertEquals(3, histogram.getBuckets().size());
            assertEquals(-10 + Math.PI, histogram.getBuckets().get(0).getKey());
            assertEquals(2, histogram.getBuckets().get(0).getDocCount());
            assertEquals(Math.PI, histogram.getBuckets().get(1).getKey());
            assertEquals(2, histogram.getBuckets().get(1).getDocCount());
            assertEquals(5 + Math.PI, histogram.getBuckets().get(2).getKey());
            assertEquals(1, histogram.getBuckets().get(2).getDocCount());
        }
    }
}