public void testSortValues() throws Exception {
    Directory dir = new RAMDirectory();
    IndexWriter indexWriter = new IndexWriter(dir, new IndexWriterConfig(Lucene.STANDARD_ANALYZER));
    for (int i = 0; i < 10; i++) {
        Document document = new Document();
        String text = new String(new char[]{(char) (97 + i), (char) (97 + i)});
        document.add(new TextField("str", text, Field.Store.YES));
        document.add(new SortedDocValuesField("str", new BytesRef(text)));
        indexWriter.addDocument(document);
    }
    IndexReader reader = SlowCompositeReaderWrapper.wrap(DirectoryReader.open(indexWriter));
    IndexSearcher searcher = new IndexSearcher(reader);
    TopFieldDocs docs = searcher.search(new MatchAllDocsQuery(), 10, new Sort(new SortField("str", SortField.Type.STRING)));
    for (int i = 0; i < 10; i++) {
        FieldDoc fieldDoc = (FieldDoc) docs.scoreDocs[i];
        assertThat((BytesRef) fieldDoc.fields[0], equalTo(new BytesRef(new String(new char[]{(char) (97 + i), (char) (97 + i)}))));
    }
}
void processQuery(Query query, ParseContext context) {
    ParseContext.Document doc = context.doc();
    FieldType pft = (FieldType) this.fieldType();
    QueryAnalyzer.Result result;
    try {
        result = QueryAnalyzer.analyze(query);
    } catch (QueryAnalyzer.UnsupportedQueryException e) {
        doc.add(new Field(pft.extractionResultField.name(), EXTRACTION_FAILED, extractionResultField.fieldType()));
        return;
    }
    for (Term term : result.terms) {
        BytesRefBuilder builder = new BytesRefBuilder();
        builder.append(new BytesRef(term.field()));
        builder.append(FIELD_VALUE_SEPARATOR);
        builder.append(term.bytes());
        doc.add(new Field(queryTermsField.name(), builder.toBytesRef(), queryTermsField.fieldType()));
    }
    if (result.verified) {
        doc.add(new Field(extractionResultField.name(), EXTRACTION_COMPLETE, extractionResultField.fieldType()));
    } else {
        doc.add(new Field(extractionResultField.name(), EXTRACTION_PARTIAL, extractionResultField.fieldType()));
    }
}
/**
 * Fixes problems with broken analysis chains if positions and offsets are messed up that can lead to
 * {@link StringIndexOutOfBoundsException} in the {@link FastVectorHighlighter}
 */
public static WeightedFragInfo fixWeightedFragInfo(FieldMapper mapper, Field[] values, WeightedFragInfo fragInfo) {
    assert fragInfo != null : "FragInfo must not be null";
    assert mapper.fieldType().name().equals(values[0].name()) : "Expected FieldMapper for field " + values[0].name();
    if (!fragInfo.getSubInfos().isEmpty() && (containsBrokenAnalysis(mapper.fieldType().indexAnalyzer()))) {
        /* This is a special case where broken analysis like WDF is used for term-vector creation at index-time
         * which can potentially mess up the offsets. To prevent a StringIndexOutOfBoundsException we need to resort
         * the fragments based on their offsets rather than using solely the positions as is done in
         * the FastVectorHighlighter. Yet, this is really a lucene problem and should be fixed in lucene rather
         * than in this hack... aka. "we are working on it!" */
        final List<SubInfo> subInfos = fragInfo.getSubInfos();
        CollectionUtil.introSort(subInfos, new Comparator<SubInfo>() {
            @Override
            public int compare(SubInfo o1, SubInfo o2) {
                int startOffset = o1.getTermsOffsets().get(0).getStartOffset();
                int startOffset2 = o2.getTermsOffsets().get(0).getStartOffset();
                return FragmentBuilderHelper.compare(startOffset, startOffset2);
            }
        });
        return new WeightedFragInfo(Math.min(fragInfo.getSubInfos().get(0).getTermsOffsets().get(0).getStartOffset(),
                fragInfo.getStartOffset()), fragInfo.getEndOffset(), subInfos, fragInfo.getTotalBoost());
    } else {
        return fragInfo;
    }
}
private Document getDocument(File file) throws IOException {
    Document document = new Document();
    // index file contents
    Field contentField = new Field(LuceneConstants.CONTENTS, new FileReader(file), TextField.TYPE_NOT_STORED);
    // index file name
    Field fileNameField = new Field(LuceneConstants.FILE_NAME, file.getName(), TextField.TYPE_STORED);
    // index file path
    Field filePathField = new Field(LuceneConstants.FILE_PATH, file.getCanonicalPath(), TextField.TYPE_STORED);
    document.add(contentField);
    document.add(fileNameField);
    document.add(filePathField);
    return document;
}
/** Test that version map cache works, is evicted on close, etc */
public void testCache() throws Exception {
    int size = Versions.lookupStates.size();
    Directory dir = newDirectory();
    IndexWriter writer = new IndexWriter(dir, new IndexWriterConfig(Lucene.STANDARD_ANALYZER));
    Document doc = new Document();
    doc.add(new Field(UidFieldMapper.NAME, "6", UidFieldMapper.Defaults.FIELD_TYPE));
    doc.add(new NumericDocValuesField(VersionFieldMapper.NAME, 87));
    writer.addDocument(doc);
    DirectoryReader reader = DirectoryReader.open(writer);
    // should increase cache size by 1
    assertEquals(87, Versions.loadVersion(reader, new Term(UidFieldMapper.NAME, "6")));
    assertEquals(size + 1, Versions.lookupStates.size());
    // should be cache hit
    assertEquals(87, Versions.loadVersion(reader, new Term(UidFieldMapper.NAME, "6")));
    assertEquals(size + 1, Versions.lookupStates.size());
    reader.close();
    writer.close();
    // core should be evicted from the map
    assertEquals(size, Versions.lookupStates.size());
    dir.close();
}
@Override
public List<Field> createFields(String name, Number value, boolean indexed, boolean docValued, boolean stored) {
    List<Field> fields = new ArrayList<>();
    if (indexed) {
        fields.add(new HalfFloatPoint(name, value.floatValue()));
    }
    if (docValued) {
        fields.add(new SortedNumericDocValuesField(name, HalfFloatPoint.halfFloatToSortableShort(value.floatValue())));
    }
    if (stored) {
        fields.add(new StoredField(name, value.floatValue()));
    }
    return fields;
}
@Override
public List<Field> createFields(String name, Number value, boolean indexed, boolean docValued, boolean stored) {
    List<Field> fields = new ArrayList<>();
    if (indexed) {
        fields.add(new FloatPoint(name, value.floatValue()));
    }
    if (docValued) {
        fields.add(new SortedNumericDocValuesField(name, NumericUtils.floatToSortableInt(value.floatValue())));
    }
    if (stored) {
        fields.add(new StoredField(name, value.floatValue()));
    }
    return fields;
}
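// Illustrative usage sketch, not from the original sources: shows the three
// concrete Lucene fields the createFields implementations above emit for a
// float value when indexed, docValued, and stored are all true (the half-float
// variant is analogous). The field name "price" and the writer are assumptions
// made for this example.
private static void indexFloatExample(IndexWriter writer) throws IOException {
    Document doc = new Document();
    doc.add(new FloatPoint("price", 9.99f));                   // point field for exact/range queries
    doc.add(new SortedNumericDocValuesField("price",
            NumericUtils.floatToSortableInt(9.99f)));          // doc values for sorting/aggregations
    doc.add(new StoredField("price", 9.99f));                  // stored copy for retrieval
    writer.addDocument(doc);
}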
public void testSimpleNumericOps() throws Exception {
    Directory dir = new RAMDirectory();
    IndexWriter indexWriter = new IndexWriter(dir, new IndexWriterConfig(Lucene.STANDARD_ANALYZER));
    Document document = new Document();
    document.add(new TextField("_id", "1", Field.Store.YES));
    document.add(new LegacyIntField("test", 2, LegacyIntField.TYPE_STORED));
    indexWriter.addDocument(document);
    IndexReader reader = DirectoryReader.open(indexWriter);
    IndexSearcher searcher = new IndexSearcher(reader);
    TopDocs topDocs = searcher.search(new TermQuery(new Term("_id", "1")), 1);
    Document doc = searcher.doc(topDocs.scoreDocs[0].doc);
    IndexableField f = doc.getField("test");
    assertThat(f.stringValue(), equalTo("2"));
    BytesRefBuilder bytes = new BytesRefBuilder();
    LegacyNumericUtils.intToPrefixCoded(2, 0, bytes);
    topDocs = searcher.search(new TermQuery(new Term("test", bytes.get())), 1);
    doc = searcher.doc(topDocs.scoreDocs[0].doc);
    f = doc.getField("test");
    assertThat(f.stringValue(), equalTo("2"));
    indexWriter.close();
}
@Override
protected void parseCreateField(ParseContext context, List<Field> fields) throws IOException {
    ValueAndBoost valueAndBoost = StringFieldMapper.parseCreateFieldForString(context,
            null /* Our null value is an int so we convert */, fieldType().boost());
    if (valueAndBoost.value() == null && fieldType().nullValue() == null) {
        return;
    }
    if (fieldType().indexOptions() != NONE || fieldType().stored() || fieldType().hasDocValues()) {
        int count;
        if (valueAndBoost.value() == null) {
            count = fieldType().nullValue();
        } else {
            count = countPositions(analyzer, simpleName(), valueAndBoost.value());
        }
        addIntegerFields(context, fields, count, valueAndBoost.boost());
    }
}
/**
 * Parse using the provided {@link ParseContext} and return a mapping
 * update if dynamic mappings modified the mappings, or {@code null} if
 * mappings were not modified.
 */
public Mapper parse(ParseContext context) throws IOException {
    final List<IndexableField> fields = new ArrayList<>(2);
    try {
        parseCreateField(context, fields);
        for (IndexableField field : fields) {
            if (!customBoost()
                    // don't set boosts, e.g. on dv fields
                    && field.fieldType().indexOptions() != IndexOptions.NONE
                    && indexCreatedVersion.before(Version.V_5_0_0_alpha1)) {
                ((Field) field).setBoost(fieldType().boost());
            }
            context.doc().add(field);
        }
    } catch (Exception e) {
        throw new MapperParsingException("failed to parse [" + fieldType().name() + "]", e);
    }
    multiFields.parse(this, context);
    return null;
}
@Override
protected void parseCreateField(ParseContext context, List<IndexableField> fields) throws IOException {
    if (fieldType().indexOptions() == IndexOptions.NONE && !fieldType().stored() && !fieldType().hasDocValues()) {
        return;
    }
    Boolean value = context.parseExternalValue(Boolean.class);
    if (value == null) {
        XContentParser.Token token = context.parser().currentToken();
        if (token == XContentParser.Token.VALUE_NULL) {
            if (fieldType().nullValue() != null) {
                value = fieldType().nullValue();
            }
        } else {
            if (indexCreatedVersion.onOrAfter(Version.V_6_0_0_alpha1_UNRELEASED)) {
                value = context.parser().booleanValue();
            } else {
                value = context.parser().booleanValueLenient();
                if (context.parser().isBooleanValueLenient() != context.parser().isBooleanValue()) {
                    String rawValue = context.parser().text();
                    deprecationLogger.deprecated("Expected a boolean for property [{}] but got [{}]", fieldType().name(), rawValue);
                }
            }
        }
    }
    if (value == null) {
        return;
    }
    if (fieldType().indexOptions() != IndexOptions.NONE || fieldType().stored()) {
        fields.add(new Field(fieldType().name(), value ? "T" : "F", fieldType()));
    }
    if (fieldType().hasDocValues()) {
        fields.add(new SortedNumericDocValuesField(fieldType().name(), value ? 1 : 0));
    }
}
public void testNRTSearchOnClosedWriter() throws Exception {
    Directory dir = new RAMDirectory();
    IndexWriter indexWriter = new IndexWriter(dir, new IndexWriterConfig(Lucene.STANDARD_ANALYZER));
    DirectoryReader reader = DirectoryReader.open(indexWriter);
    for (int i = 0; i < 100; i++) {
        Document document = new Document();
        TextField field = new TextField("_id", Integer.toString(i), Field.Store.YES);
        field.setBoost(i);
        document.add(field);
        indexWriter.addDocument(document);
    }
    reader = refreshReader(reader);
    indexWriter.close();
    TermsEnum termDocs = SlowCompositeReaderWrapper.wrap(reader).terms("_id").iterator();
    termDocs.next();
}
public ParsedDocument(Field version, SeqNoFieldMapper.SequenceID seqID, String id, String type,
                      String routing, List<Document> documents, BytesReference source,
                      XContentType xContentType, Mapping dynamicMappingsUpdate) {
    this.version = version;
    this.seqID = seqID;
    this.id = id;
    this.type = type;
    this.uid = Uid.createUidAsBytes(type, id);
    this.routing = routing;
    this.documents = documents;
    this.source = source;
    this.dynamicMappingsUpdate = dynamicMappingsUpdate;
    this.xContentType = xContentType;
}
private Engine.Index getIndex(final String id) {
    final String type = "test";
    final ParseContext.Document document = new ParseContext.Document();
    document.add(new TextField("test", "test", Field.Store.YES));
    final Field uidField = new Field("_uid", Uid.createUid(type, id), UidFieldMapper.Defaults.FIELD_TYPE);
    final Field versionField = new NumericDocValuesField("_version", Versions.MATCH_ANY);
    final SeqNoFieldMapper.SequenceID seqID = SeqNoFieldMapper.SequenceID.emptySeqID();
    document.add(uidField);
    document.add(versionField);
    document.add(seqID.seqNo);
    document.add(seqID.seqNoDocValue);
    document.add(seqID.primaryTerm);
    final BytesReference source = new BytesArray(new byte[] { 1 });
    final ParsedDocument doc = new ParsedDocument(versionField, seqID, id, type, null,
            Arrays.asList(document), source, XContentType.JSON, null);
    return new Engine.Index(new Term("_uid", doc.uid()), doc);
}
private void addFtsStatusDoc(List<Document> docs, FTSStatus ftsStatus, NodeRef nodeRef,
        NodeRef.Status nodeStatus) {
    // If we are being called during FTS failover, then don't bother generating a new doc
    if (ftsStatus == FTSStatus.Clean) {
        return;
    }
    Document doc = new Document();
    doc.add(new Field("ID", GUID.generate(), Field.Store.YES, Field.Index.NO_NORMS, Field.TermVector.NO));
    doc.add(new Field("FTSREF", nodeRef.toString(), Field.Store.YES, Field.Index.NO_NORMS, Field.TermVector.NO));
    doc.add(new Field("TX", nodeStatus == null ? AlfrescoTransactionSupport.getTransactionId()
            : nodeStatus.getChangeTxnId(), Field.Store.YES, Field.Index.NO_NORMS, Field.TermVector.NO));
    doc.add(new Field("FTSSTATUS", ftsStatus.name(), Field.Store.NO, Field.Index.NO_NORMS, Field.TermVector.NO));
    docs.add(doc);
}
/**
 * Parse using the provided {@link ParseContext} and return a mapping
 * update if dynamic mappings modified the mappings, or {@code null} if
 * mappings were not modified.
 */
public Mapper parse(ParseContext context) throws IOException {
    final List<Field> fields = new ArrayList<>(2);
    try {
        parseCreateField(context, fields);
        for (Field field : fields) {
            if (!customBoost()) {
                field.setBoost(fieldType().boost());
            }
            context.doc().add(field);
        }
    } catch (Exception e) {
        throw new MapperParsingException("failed to parse [" + fieldType().names().fullName() + "]", e);
    }
    multiFields.parse(this, context);
    return null;
}
public void testNoTokens() throws Exception {
    Directory dir = new RAMDirectory();
    IndexWriter indexWriter = new IndexWriter(dir, new IndexWriterConfig(Lucene.KEYWORD_ANALYZER));
    FieldType allFt = getAllFieldType();
    Document doc = new Document();
    doc.add(new Field("_id", "1", StoredField.TYPE));
    doc.add(new AllField("_all", "", 2.0f, allFt));
    indexWriter.addDocument(doc);
    IndexReader reader = DirectoryReader.open(indexWriter);
    IndexSearcher searcher = new IndexSearcher(reader);
    TopDocs docs = searcher.search(new MatchAllDocsQuery(), 10);
    assertThat(docs.totalHits, equalTo(1));
    assertThat(docs.scoreDocs[0].doc, equalTo(0));
}
public void testEmpty() throws Exception {
    Document d = new Document();
    d.add(new StringField("field", "value", Field.Store.NO));
    writer.addDocument(d);
    refreshReader();
    IndexFieldData fieldData = getForField("non_existing_field");
    int max = randomInt(7);
    for (LeafReaderContext readerContext : readerContexts) {
        AtomicFieldData previous = null;
        for (int i = 0; i < max; i++) {
            AtomicFieldData current = fieldData.load(readerContext);
            assertThat(current.ramBytesUsed(), equalTo(0L));
            if (previous != null) {
                assertThat(current, not(sameInstance(previous)));
            }
            previous = current;
        }
    }
}
public void testCanOpenIndex() throws IOException {
    final ShardId shardId = new ShardId("index", "_na_", 1);
    IndexWriterConfig iwc = newIndexWriterConfig();
    Path tempDir = createTempDir();
    final BaseDirectoryWrapper dir = newFSDirectory(tempDir);
    assertFalse(Store.canOpenIndex(logger, tempDir, shardId, (id, l) -> new DummyShardLock(id)));
    IndexWriter writer = new IndexWriter(dir, iwc);
    Document doc = new Document();
    doc.add(new StringField("id", "1", random().nextBoolean() ? Field.Store.YES : Field.Store.NO));
    writer.addDocument(doc);
    writer.commit();
    writer.close();
    assertTrue(Store.canOpenIndex(logger, tempDir, shardId, (id, l) -> new DummyShardLock(id)));
    DirectoryService directoryService = new DirectoryService(shardId, INDEX_SETTINGS) {
        @Override
        public Directory newDirectory() throws IOException {
            return dir;
        }
    };
    Store store = new Store(shardId, INDEX_SETTINGS, directoryService, new DummyShardLock(shardId));
    store.markStoreCorrupted(new CorruptIndexException("foo", "bar"));
    assertFalse(Store.canOpenIndex(logger, tempDir, shardId, (id, l) -> new DummyShardLock(id)));
    store.close();
}
@Override
protected void parseCreateField(ParseContext context, List<Field> fields) throws IOException, AlreadyExpiredException {
    if (enabledState.enabled && !context.sourceToParse().flyweight()) {
        long ttl = context.sourceToParse().ttl();
        if (ttl <= 0 && defaultTTL > 0) { // no ttl provided so we use the default value
            ttl = defaultTTL;
            context.sourceToParse().ttl(ttl);
        }
        if (ttl > 0) { // a ttl has been provided either externally or in the _source
            long timestamp = context.sourceToParse().timestamp();
            long expire = new Date(timestamp + ttl).getTime();
            long now = System.currentTimeMillis();
            // there is no point indexing an already expired doc
            if (context.sourceToParse().origin() == SourceToParse.Origin.PRIMARY && now >= expire) {
                throw new AlreadyExpiredException(context.index(), context.type(), context.id(), timestamp, ttl, now);
            }
            // the expiration timestamp (timestamp + ttl) is set as a field
            fields.add(new LongFieldMapper.CustomLongNumericField(expire, fieldType()));
        }
    }
}
@Override
protected void parseCreateField(ParseContext context, List<IndexableField> fields) throws IOException {
    String value;
    if (context.externalValueSet()) {
        value = context.externalValue().toString();
    } else {
        value = context.parser().textOrNull();
    }
    if (value == null) {
        return;
    }
    if (fieldType().indexOptions() != IndexOptions.NONE || fieldType().stored()) {
        Field field = new Field(fieldType().name(), value, fieldType());
        fields.add(field);
    }
    if (fieldType().hasDocValues()) {
        fields.add(new SortedSetDocValuesField(fieldType().name(), new BytesRef(value)));
    }
}
@Override
protected void parseCreateField(ParseContext context, List<Field> fields) throws IOException {
    RuntimeException e = null;
    try {
        innerParseCreateField(context, fields);
    } catch (IllegalArgumentException e1) {
        e = e1;
    } catch (MapperParsingException e2) {
        e = e2;
    }
    if (e != null && !ignoreMalformed.value()) {
        throw e;
    }
}
public QName getPrimaryAssocTypeQName() {
    Field field = getDocument().getField("PRIMARYASSOCTYPEQNAME");
    if (field != null) {
        String qname = field.stringValue();
        return QName.createQName(qname);
    } else {
        return ContentModel.ASSOC_CHILDREN;
    }
}
private void createLock(final LuceneIndex index) throws NoSuchFieldException,
        IllegalArgumentException, IllegalAccessException, IOException {
    final Class<LuceneIndex> li = LuceneIndex.class;
    final java.lang.reflect.Field dirCache = li.getDeclaredField("dirCache"); //NOI18N
    dirCache.setAccessible(true);
    Object o = dirCache.get(index);
    final java.lang.reflect.Field directory = o.getClass().getDeclaredField("fsDir"); //NOI18N
    directory.setAccessible(true);
    Directory dir = (Directory) directory.get(o);
    dir.makeLock("test").obtain(); //NOI18N
}
private void clearValidityCache(final LuceneIndex index) throws NoSuchFieldException,
        IllegalArgumentException, IllegalAccessException, IOException {
    final Class<LuceneIndex> li = LuceneIndex.class;
    final java.lang.reflect.Field dirCache = li.getDeclaredField("dirCache"); //NOI18N
    dirCache.setAccessible(true);
    Object o = dirCache.get(index);
    final java.lang.reflect.Field reader = o.getClass().getDeclaredField("reader");
    reader.setAccessible(true);
    IndexReader r = (IndexReader) reader.get(o);
    if (r != null) {
        r.close();
    }
    reader.set(o, null);
}
@Override
public List<Document> getDoc(List<IfcProductRecordText> items) {
    List<Document> docs = new ArrayList<Document>();
    FieldType storedType = new FieldType();
    storedType.setIndexed(true);
    storedType.setStored(true);
    storedType.setTokenized(true);
    FieldType unTokeType = new FieldType();
    unTokeType.setIndexed(true);
    unTokeType.setStored(true);
    unTokeType.setTokenized(false);
    for (IfcProductRecordText record : items) {
        Document doc = new Document();
        Field oid = genStringFieldCheckNull(Key_Oid, record.getOid(), unTokeType);
        Field type = genStringFieldCheckNull(Key_Type, record.getType(), storedType);
        Field name = genStringFieldCheckNull(Key_Name, record.getName(), storedType);
        Field detail = genStringFieldCheckNull(Key_Detail, record.getDetail(), storedType);
        doc.add(oid);
        doc.add(type);
        doc.add(name);
        doc.add(detail);
        docs.add(doc);
    }
    return docs;
}
public Field getCompletionField(ContextMapping.Context ctx, String input, BytesRef payload) {
    final String originalInput = input;
    if (input.length() > maxInputLength) {
        final int len = correctSubStringLen(input, Math.min(maxInputLength, input.length()));
        input = input.substring(0, len);
    }
    for (int i = 0; i < input.length(); i++) {
        if (isReservedChar(input.charAt(i))) {
            throw new IllegalArgumentException("Illegal input [" + originalInput + "] UTF-16 codepoint [0x"
                    + Integer.toHexString(input.charAt(i)).toUpperCase(Locale.ROOT) + "] at position " + i
                    + " is a reserved character");
        }
    }
    return new SuggestField(fieldType().names().indexName(), ctx, input, fieldType(), payload,
            fieldType().analyzingSuggestLookupProvider);
}
private static Function<? super String, ? extends Field> getFunction(String name) {
    Function<? super String, ? extends Field> mapper = null;
    switch (name) {
        case ID:
            mapper = id -> new StringField(ID, id, Field.Store.YES);
            break;
        case SUBJECT:
            mapper = subject -> new TextField(SUBJECT, subject, Field.Store.YES);
            break;
    }
    return mapper;
}
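// Illustrative usage sketch, assumed rather than taken from the original source:
// the function returned above maps a raw string to the appropriate Field, so a
// document can be assembled by applying the mappers. Note that getFunction
// returns null for any name other than ID and SUBJECT, so real callers should
// guard against that.
private static Document buildMessageDoc(String id, String subject) {
    Document doc = new Document();
    doc.add(getFunction(ID).apply(id));           // exact-match, untokenized id field
    doc.add(getFunction(SUBJECT).apply(subject)); // analyzed, full-text subject field
    return doc;
}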
/**
 * Create the document to write to lucene.
 * It creates entries for three fields for each document
 * (URI_of_DBpediaResource, Label_DBpediaResource, ClassLabel_of_DBpediaResource).
 *
 * @return the document containing index entry information
 */
public Document createDocument() {
    Document doc = new Document();
    doc.add(new StoredField(URI_FIELD, uri));
    doc.add(new TextField(LABEL_FIELD, label, Field.Store.YES));
    doc.add(new StringField(CLASS_LABEL_FIELD, class_label, Field.Store.YES));
    return doc;
}
public List<Field> get(int n, FieldSelector fieldSelector) throws IOException {
    Document document = ReferenceCountingReadOnlyIndexReader.super.document(n, fieldSelector);
    Field[] fields = document.getFields(fieldName);
    ArrayList<Field> cacheable = new ArrayList<Field>(fields.length);
    for (Field field : fields) {
        cacheable.add(field);
    }
    return cacheable;
}
@Override
protected void innerParseCreateField(ParseContext context, List<Field> fields) throws IOException {
    String ipAsString;
    if (context.externalValueSet()) {
        ipAsString = (String) context.externalValue();
        if (ipAsString == null) {
            ipAsString = fieldType().nullValueAsString();
        }
    } else {
        if (context.parser().currentToken() == XContentParser.Token.VALUE_NULL) {
            ipAsString = fieldType().nullValueAsString();
        } else {
            ipAsString = context.parser().text();
        }
    }
    if (ipAsString == null) {
        return;
    }
    if (context.includeInAll(includeInAll, this)) {
        context.allEntries().addText(fieldType().names().fullName(), ipAsString, fieldType().boost());
    }
    final long value = ipToLong(ipAsString);
    if (fieldType().indexOptions() != IndexOptions.NONE || fieldType().stored()) {
        CustomLongNumericField field = new CustomLongNumericField(value, fieldType());
        field.setBoost(fieldType().boost());
        fields.add(field);
    }
    if (fieldType().hasDocValues()) {
        addDocValue(context, fields, value);
    }
}
/**
 * Create a new document and write it to the given writer
 *
 * @param writer       The writer to write out to
 * @param documentType The document type to save in the doc
 * @param fields       The searchable and data fields to write into the doc
 * @throws IOException If there was a problem writing the doc
 */
private static void addDocument(IndexWriter writer, DocumentType documentType, Field... fields) throws IOException {
    // make a new, empty document
    Document doc = new Document();
    // add doc type field
    doc.add(new Field("documentType", documentType.toString(), Field.Store.YES, Field.Index.NOT_ANALYZED_NO_NORMS));
    // add other fields
    if (fields != null) {
        for (Field field : fields) {
            doc.add(field);
        }
    }
    // write into index, assuming we are recreating every time
    writer.addDocument(doc);
}
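// Illustrative call, not from the original source: DocumentType.USER and the
// field names/values here are assumptions for the example. It uses the same
// Lucene 3.x Field constructors as the helper above.
private static void indexUserExample(IndexWriter writer) throws IOException {
    addDocument(writer, DocumentType.USER,
            new Field("name", "alice", Field.Store.YES, Field.Index.ANALYZED),
            new Field("email", "alice@example.com", Field.Store.YES, Field.Index.NOT_ANALYZED));
}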
public void updateIndex(Blog blog) throws Exception {
    IndexWriter writer = getWriter();
    Document doc = new Document();
    doc.add(new StringField("blogid", String.valueOf(blog.getBlogid()), Field.Store.YES));
    doc.add(new TextField("title", blog.getTitle(), Field.Store.YES));
    doc.add(new TextField("content", Jsoup.parse(blog.getContent()).text(), Field.Store.YES));
    writer.updateDocument(new Term("blogid", String.valueOf(blog.getBlogid())), doc);
    writer.close();
}
@Override
public Mapper parse(ParseContext context) throws IOException {
    QueryShardContext queryShardContext = this.queryShardContext.get();
    if (context.doc().getField(queryBuilderField.name()) != null) {
        // If a percolator query has been defined in an array object then multiple percolator queries
        // could be provided. In order to prevent this we fail if we try to parse more than one query
        // for the current document.
        throw new IllegalArgumentException("a document can only contain one percolator query");
    }
    XContentParser parser = context.parser();
    QueryBuilder queryBuilder = parseQueryBuilder(
            queryShardContext.newParseContext(parser), parser.getTokenLocation()
    );
    verifyQuery(queryBuilder);
    // Fetching of terms, shapes and indexed scripts happen during this rewrite:
    queryBuilder = queryBuilder.rewrite(queryShardContext);
    try (XContentBuilder builder = XContentFactory.contentBuilder(QUERY_BUILDER_CONTENT_TYPE)) {
        queryBuilder.toXContent(builder, new MapParams(Collections.emptyMap()));
        builder.flush();
        byte[] queryBuilderAsBytes = BytesReference.toBytes(builder.bytes());
        context.doc().add(new Field(queryBuilderField.name(), queryBuilderAsBytes, queryBuilderField.fieldType()));
    }
    Query query = toQuery(queryShardContext, mapUnmappedFieldAsString, queryBuilder);
    processQuery(query, context);
    return null;
}
@Override
protected Field[] getFields(IndexReader reader, int docId, String fieldName) throws IOException {
    // we know it's a low-level reader and a matching docId, since that's how we call the highlighter
    SourceLookup sourceLookup = searchContext.lookup().source();
    sourceLookup.setSegmentAndDocument((LeafReaderContext) reader.getContext(), docId);
    List<Object> values = sourceLookup.extractRawValues(mapper.fieldType().name());
    Field[] fields = new Field[values.size()];
    for (int i = 0; i < values.size(); i++) {
        fields[i] = new Field(mapper.fieldType().name(), values.get(i).toString(), TextField.TYPE_NOT_STORED);
    }
    return fields;
}
@Override
protected void parseCreateField(ParseContext context, List<IndexableField> fields) throws IOException {
    if (!fieldType().stored() && !fieldType().hasDocValues()) {
        return;
    }
    byte[] value = context.parseExternalValue(byte[].class);
    if (value == null) {
        if (context.parser().currentToken() == XContentParser.Token.VALUE_NULL) {
            return;
        } else {
            value = context.parser().binaryValue();
        }
    }
    if (value == null) {
        return;
    }
    if (fieldType().stored()) {
        fields.add(new Field(fieldType().name(), value, fieldType()));
    }
    if (fieldType().hasDocValues()) {
        CustomBinaryDocValuesField field = (CustomBinaryDocValuesField) context.doc().getByKey(fieldType().name());
        if (field == null) {
            field = new CustomBinaryDocValuesField(fieldType().name(), value);
            context.doc().addWithKey(fieldType().name(), field);
        } else {
            field.add(value);
        }
    }
}
@Override
public List<Field> createFields(String name, Number value, boolean indexed, boolean docValued, boolean stored) {
    List<Field> fields = new ArrayList<>();
    if (indexed) {
        fields.add(new IntPoint(name, value.intValue()));
    }
    if (docValued) {
        fields.add(new SortedNumericDocValuesField(name, value.intValue()));
    }
    if (stored) {
        fields.add(new StoredField(name, value.intValue()));
    }
    return fields;
}
@Override
public List<Field> createFields(String name, Number value, boolean indexed, boolean docValued, boolean stored) {
    List<Field> fields = new ArrayList<>();
    if (indexed) {
        fields.add(new LongPoint(name, value.longValue()));
    }
    if (docValued) {
        fields.add(new SortedNumericDocValuesField(name, value.longValue()));
    }
    if (stored) {
        fields.add(new StoredField(name, value.longValue()));
    }
    return fields;
}
@Override
public Document createDocument(Artist artist, MusicFolder musicFolder) {
    Document doc = new Document();
    doc.add(new NumericField(FIELD_ID, Field.Store.YES, false).setIntValue(artist.getId()));
    doc.add(new Field(FIELD_ARTIST, artist.getName(), Field.Store.YES, Field.Index.ANALYZED));
    doc.add(new NumericField(FIELD_FOLDER_ID, Field.Store.NO, true).setIntValue(musicFolder.getId()));
    return doc;
}
private void addDoc(RandomIndexWriter writer, String[] texts) throws IOException {
    Document doc = new Document();
    for (String text : texts) {
        doc.add(newTextField("text", text, Field.Store.YES));
    }
    writer.addDocument(doc);
}
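// Illustrative call, with assumed values not taken from the original source:
// every string lands in the same "text" field, so one document carries several
// values of that field, which is the usual setup for highlighting tests.
private void addDocExample(RandomIndexWriter writer) throws IOException {
    addDoc(writer, new String[] { "hello world", "lucene highlighting" });
}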