/**
 * Indexes one file: stores its path, records the last-modified stamp as a
 * point field, and tokenizes the file body as the "contents" field. In CREATE
 * mode a new document is appended; otherwise any document with the same path
 * is replaced.
 */
static void indexDoc(IndexWriter writer, Path file, long lastModified) throws IOException {
    try (InputStream in = Files.newInputStream(file)) {
        Document document = new Document();
        document.add(new StringField("path", file.toString(), Field.Store.YES));
        document.add(new LongPoint("modified", lastModified));
        Reader contents = new BufferedReader(new InputStreamReader(in, StandardCharsets.UTF_8));
        document.add(new TextField("contents", contents));
        if (writer.getConfig().getOpenMode() == OpenMode.CREATE) {
            // fresh index: no duplicate can exist yet, plain add is enough
            System.out.println("adding " + file);
            writer.addDocument(document);
        } else {
            // incremental update: replace the previous version keyed by path
            System.out.println("updating " + file);
            writer.updateDocument(new Term("path", file.toString()), document);
        }
    }
}
/**
 * Indexes one document with term vectors (positions + offsets) and verifies
 * that FastVectorHighlighter highlights the matched term in a fragment.
 * Fix: the original leaked the reader, writer and directory — they are now
 * released in a finally block.
 */
public void testVectorHighlighter() throws Exception {
    Directory dir = new RAMDirectory();
    IndexWriter indexWriter = new IndexWriter(dir, new IndexWriterConfig(Lucene.STANDARD_ANALYZER));
    Document document = new Document();
    document.add(new TextField("_id", "1", Field.Store.YES));
    FieldType vectorsType = new FieldType(TextField.TYPE_STORED);
    vectorsType.setStoreTermVectors(true);
    vectorsType.setStoreTermVectorPositions(true);
    vectorsType.setStoreTermVectorOffsets(true);
    document.add(new Field("content", "the big bad dog", vectorsType));
    indexWriter.addDocument(document);
    IndexReader reader = DirectoryReader.open(indexWriter);
    try {
        IndexSearcher searcher = new IndexSearcher(reader);
        TopDocs topDocs = searcher.search(new TermQuery(new Term("_id", "1")), 1);
        assertThat(topDocs.totalHits, equalTo(1));
        FastVectorHighlighter highlighter = new FastVectorHighlighter();
        String fragment = highlighter.getBestFragment(
                highlighter.getFieldQuery(new TermQuery(new Term("content", "bad"))),
                reader, topDocs.scoreDocs[0].doc, "content", 30);
        assertThat(fragment, notNullValue());
        assertThat(fragment, equalTo("the big <b>bad</b> dog"));
    } finally {
        // release resources the original test leaked
        reader.close();
        indexWriter.close();
        dir.close();
    }
}
/**
 * Converts a term to its text, restricted to the target field, optionally
 * truncating at the first '.' after the matched prefix (directOnly mode) so
 * only the immediate name segment is returned.
 * Throws {@code STOP} once the enumeration leaves {@code fieldName}.
 * NOTE(review): the field check uses reference {@code !=} on Strings — this
 * appears to rely on Lucene interning field names; confirm before changing
 * it to {@code equals()}.
 * Returns null when the term's text neither matches the prefix nor {@code all}.
 */
@Override public String convert(Term currentTerm) throws Stop { if (fieldName != currentTerm.field()) { throw STOP; } String currentText = currentTerm.text(); if (all || currentText.startsWith(value)) { if (directOnly) { int index = currentText.indexOf('.', value.length()); //NOI18N if (index>0) { currentText = currentText.substring(0,index); } } return currentText; } return null; }
/**
 * Builds the Lucene query for the case-insensitive regex match operator (~*).
 * Returns null when the input cannot be normalized into a (reference, literal)
 * pair; rejects non-string patterns.
 */
@Override
public Query apply(Function input, Context context) throws IOException {
    Tuple<Reference, Literal> prepared = prepare(input);
    if (prepared == null) {
        return null;
    }
    String column = prepared.v1().info().ident().columnIdent().fqn();
    Object pattern = prepared.v2().value();
    if (!(pattern instanceof BytesRef)) {
        throw new IllegalArgumentException("Can only use ~* with patterns of type string");
    }
    RegexQuery regexQuery = new RegexQuery(new Term(column, BytesRefs.toBytesRef(pattern)));
    // case-insensitive, Unicode-aware matching via java.util.regex semantics
    regexQuery.setRegexImplementation(new JavaUtilRegexCapabilities(
            JavaUtilRegexCapabilities.FLAG_CASE_INSENSITIVE
                    | JavaUtilRegexCapabilities.FLAG_UNICODE_CASE));
    return regexQuery;
}
/**
 * Round-trips a legacy int field: verifies the stored value can be read back
 * both by _id lookup and via its prefix-coded term form.
 * Fix: the original closed only the writer — the NRT reader and the directory
 * are now closed as well.
 */
public void testSimpleNumericOps() throws Exception {
    Directory dir = new RAMDirectory();
    IndexWriter indexWriter = new IndexWriter(dir, new IndexWriterConfig(Lucene.STANDARD_ANALYZER));
    Document document = new Document();
    document.add(new TextField("_id", "1", Field.Store.YES));
    document.add(new LegacyIntField("test", 2, LegacyIntField.TYPE_STORED));
    indexWriter.addDocument(document);
    IndexReader reader = DirectoryReader.open(indexWriter);
    IndexSearcher searcher = new IndexSearcher(reader);
    TopDocs topDocs = searcher.search(new TermQuery(new Term("_id", "1")), 1);
    Document doc = searcher.doc(topDocs.scoreDocs[0].doc);
    IndexableField f = doc.getField("test");
    assertThat(f.stringValue(), equalTo("2"));
    // the same doc must be findable through the prefix-coded numeric term
    BytesRefBuilder bytes = new BytesRefBuilder();
    LegacyNumericUtils.intToPrefixCoded(2, 0, bytes);
    topDocs = searcher.search(new TermQuery(new Term("test", bytes.get())), 1);
    doc = searcher.doc(topDocs.scoreDocs[0].doc);
    f = doc.getField("test");
    assertThat(f.stringValue(), equalTo("2"));
    // release everything, not just the writer
    reader.close();
    indexWriter.close();
    dir.close();
}
/**
 * Deletes index documents belonging to a feature file. For an ordinary type
 * only documents of that exact type are removed; for GENE every document with
 * the file id is removed except bookmarks and variations.
 */
private void deleteDocumentByTypeAndId(FeatureType type, Long id, IndexWriter writer) throws IOException {
    BooleanQuery.Builder deleteQuery = new BooleanQuery.Builder();
    deleteQuery.add(
            new TermQuery(new Term(FeatureIndexFields.FILE_ID.getFieldName(), id.toString())),
            BooleanClause.Occur.MUST);
    if (type == FeatureType.GENE) {
        // genes: delete everything for the file id except bookmark/variation docs
        deleteQuery.add(new TermQuery(new Term(FeatureIndexFields.FEATURE_TYPE.getFieldName(),
                FeatureType.BOOKMARK.getFileValue())), BooleanClause.Occur.MUST_NOT);
        deleteQuery.add(new TermQuery(new Term(FeatureIndexFields.FEATURE_TYPE.getFieldName(),
                FeatureType.VARIATION.getFileValue())), BooleanClause.Occur.MUST_NOT);
    } else {
        deleteQuery.add(new TermQuery(new Term(FeatureIndexFields.FEATURE_TYPE.getFieldName(),
                type.getFileValue())), BooleanClause.Occur.MUST);
    }
    writer.deleteDocuments(deleteQuery.build());
}
/**
 * A quoted query string must parse into a dismax wrapping a single two-term
 * PhraseQuery carrying the configured slop.
 */
public void testToQueryPhraseQuery() throws IOException {
    assumeTrue("test runs only when at least a type is registered", getCurrentTypes().length > 0);
    Query parsed = queryStringQuery("\"term1 term2\"")
            .defaultField(STRING_FIELD_NAME)
            .phraseSlop(3)
            .toQuery(createShardContext());
    assertThat(parsed, instanceOf(DisjunctionMaxQuery.class));
    DisjunctionMaxQuery dismax = (DisjunctionMaxQuery) parsed;
    assertThat(dismax.getDisjuncts().size(), equalTo(1));
    assertThat(dismax.getDisjuncts().get(0), instanceOf(PhraseQuery.class));
    PhraseQuery phrase = (PhraseQuery) dismax.getDisjuncts().get(0);
    Term[] phraseTerms = phrase.getTerms();
    assertThat(phraseTerms.length, equalTo(2));
    assertThat(phraseTerms[0], equalTo(new Term(STRING_FIELD_NAME, "term1")));
    assertThat(phraseTerms[1], equalTo(new Term(STRING_FIELD_NAME, "term2")));
    assertThat(phrase.getSlop(), equalTo(3));
}
/**
 * Deserializes this instance: id, the term array, then term statistics keyed
 * by those terms, field statistics, and maxDoc. The read order must mirror the
 * corresponding writeTo.
 */
@Override
public void readFrom(StreamInput in) throws IOException {
    super.readFrom(in);
    id = in.readLong();
    final int numTerms = in.readVInt();
    if (numTerms == 0) {
        terms = EMPTY_TERMS; // shared constant avoids allocating empty arrays
    } else {
        terms = new Term[numTerms];
        for (int i = 0; i < numTerms; i++) {
            terms[i] = new Term(in.readString(), in.readBytesRef());
        }
    }
    this.termStatistics = readTermStats(in, terms);
    readFieldStats(in, fieldStatistics);
    maxDoc = in.readVInt();
}
/** Returns the live version (add or delete) for this uid. */ VersionValue getUnderLock(final Term uid) { Maps currentMaps = maps; // First try to get the "live" value: VersionValue value = currentMaps.current.get(uid.bytes()); if (value != null) { return value; } value = currentMaps.old.get(uid.bytes()); if (value != null) { return value; } return tombstones.get(uid.bytes()); }
/**
 * Builds the fallback query for products that define the default locale but
 * not the requested one: such products are matched by a wildcard search of the
 * phrase against their default-locale field.
 */
private static BooleanQuery constructDefaultLocaleHandlingQuery(String fieldName,
        String locale, String defaultLocale, String searchPhrase) {
    // products defining the default locale but NOT the requested locale
    BooleanQuery localeFallback = new BooleanQuery();
    localeFallback.add(new TermQuery(new Term(
            fieldName + ProductClassBridge.DEFINED_LOCALES_SUFFIX, defaultLocale)), Occur.MUST);
    localeFallback.add(new TermQuery(new Term(
            fieldName + ProductClassBridge.DEFINED_LOCALES_SUFFIX, locale)), Occur.MUST_NOT);
    // substring match of the phrase in the default-locale variant of the field
    BooleanQuery phraseMatch = new BooleanQuery();
    phraseMatch.add(new WildcardQuery(new Term(
            fieldName + defaultLocale, "*" + searchPhrase.toLowerCase() + "*")), Occur.SHOULD);
    BooleanQuery combined = new BooleanQuery();
    combined.add(localeFallback, Occur.MUST);
    combined.add(phraseMatch, Occur.MUST);
    return combined;
}
/**
 * Builds a Query matching OAI status-deleted records whose source directory is
 * not one of the directories currently configured in the RepositoryManager.
 *
 * @return a Query for deleted documents not from any configured directory
 * @see #getDeletedDocsNotFromAnyDirectory
 */
public final Query getDeletedDocsNotFromAnyDirectoryQuery() {
    BooleanQuery query = new BooleanQuery();
    query.add(new TermQuery(new Term("deleted", "true")), BooleanClause.Occur.MUST);
    List sets = getSetInfos();
    if (sets != null) {
        // exclude every directory that is still configured
        for (int i = 0; i < sets.size(); i++) {
            SetInfo info = (SetInfo) sets.get(i);
            query.add(new TermQuery(new Term("docdir", info.getDirectory().trim())),
                    BooleanClause.Occur.MUST_NOT);
        }
    }
    return query;
}
/**
 * Asserts that exactly {@code expectedAmount} documents match the given field
 * content, and — when at least one is expected — that the top hit carries the
 * expected element id and index type.
 * Fix: {@code assertTrue(a == b)} replaced with {@code assertEquals} so a
 * mismatch reports both values instead of a bare boolean failure.
 *
 * @param elementId      expected FIELD_ID of the first hit
 * @param fieldContent   value searched in FIELDNAME
 * @param expectedAmount exact number of expected hits
 * @throws IOException if the index cannot be read
 */
private void checkIndexContent(final String elementId, final String fieldContent,
        final int expectedAmount) throws IOException {
    final IndexReader reader = IndexManager.getInstance().getIndex().getIndexReader();
    final IndexSearcher searcher = new IndexSearcher(reader);
    // ask for a few more than expected so surplus matches are detected
    final TopDocs topDocs = searcher.search(
            new TermQuery(new Term(FIELDNAME, fieldContent)), expectedAmount + 10);
    assertNotNull(topDocs);
    assertEquals(expectedAmount, topDocs.totalHits);
    if (expectedAmount > 0) {
        final ScoreDoc scoreDoc = topDocs.scoreDocs[0];
        assertNotNull(scoreDoc);
        final Document doc = reader.document(scoreDoc.doc);
        assertNotNull(doc);
        assertEquals(fieldContent, doc.get(FIELDNAME));
        assertEquals(elementId, doc.get(IIndexElement.FIELD_ID));
        assertEquals(INDEX_TYPE, doc.get(IIndexElement.FIELD_INDEX_TYPE));
    }
}
/**
 * With _field_names disabled, an existence-style query ("foo:*") must become a
 * plain WildcardQuery on the field itself. The mapping is merged back with
 * _field_names enabled afterwards.
 */
public void testDisabledFieldNamesField() throws Exception {
    QueryShardContext context = createShardContext();
    context.getMapperService().merge("new_type",
            new CompressedXContent(PutMappingRequest.buildFromSimplifiedDef("new_type",
                    "foo", "type=text", "_field_names", "enabled=false").string()),
            MapperService.MergeReason.MAPPING_UPDATE, true);
    Query actual = new QueryStringQueryBuilder("foo:*").toQuery(context);
    assertThat(actual, equalTo(new WildcardQuery(new Term("foo", "*"))));
    // re-enable _field_names on the type again
    context.getMapperService().merge("new_type",
            new CompressedXContent(PutMappingRequest.buildFromSimplifiedDef("new_type",
                    "foo", "type=text", "_field_names", "enabled=true").string()),
            MapperService.MergeReason.MAPPING_UPDATE, true);
}
/**
 * Verifies the Lucene query produced by a TermQueryBuilder: either a TermQuery
 * whose term matches the builder's field/value (resolved through the mapper
 * when one exists) or, otherwise, a PointRangeQuery equal to what the mapper
 * itself would produce.
 */
@Override
protected void doAssertLuceneQuery(TermQueryBuilder queryBuilder, Query query,
        SearchContext context) throws IOException {
    assertThat(query, either(instanceOf(TermQuery.class)).or(instanceOf(PointRangeQuery.class)));
    MappedFieldType fieldType = context.getQueryShardContext().fieldMapper(queryBuilder.fieldName());
    if (!(query instanceof TermQuery)) {
        // point-based field: the mapper is authoritative for the expected query
        assertEquals(query, fieldType.termQuery(queryBuilder.value(), null));
        return;
    }
    TermQuery actual = (TermQuery) query;
    assertThat(actual.getTerm().field(), equalTo(queryBuilder.fieldName()));
    if (fieldType == null) {
        // unmapped field: compare raw bytes of the value
        assertThat(actual.getTerm().bytes(), equalTo(BytesRefs.toBytesRef(queryBuilder.value())));
    } else {
        Term expected = ((TermQuery) fieldType.termQuery(queryBuilder.value(), null)).getTerm();
        assertThat(actual.getTerm(), equalTo(expected));
    }
}
/**
 * Produces one SimilarTermModel per query term, each paired with a single
 * related term obtained from {@code similarTerm(field)}.
 */
@Override
public SimilarTermModel[] GetSimilarTerms(String field, String[] queryTerms) {
    SimilarTermModel[] models = new SimilarTermModel[queryTerms.length];
    for (int idx = 0; idx < queryTerms.length; idx++) {
        TermWeightTuple[] related = { similarTerm(field) };
        models[idx] = new SimilarTermModel(new Term(field, queryTerms[idx]), related);
    }
    return models;
}
/**
 * Associates a term with a document id, lazily creating the per-document term
 * set on first use.
 */
void add (final int docId, final @NonNull Term term) {
    Set<Term> termsForDoc = doc2Terms.get(docId);
    if (termsForDoc == null) {
        // first term for this document — create its bucket
        termsForDoc = new HashSet<Term>();
        doc2Terms.put(docId, termsForDoc);
    }
    termsForDoc.add(term);
}
/**
 * Visits every term in {@code fieldName} that starts with the configured
 * prefix and whose remainder matches the configured regex pattern.
 * Fix: removed the redundant {@code text != null} test inside the
 * {@code while (text != null)} loop and folded the prefix check into the loop
 * condition, eliminating the break-based flow.
 */
@Override
public void visitMatchingTerms(
        IndexReader reader,
        String fieldName,
        MatchingTermVisitor mtv) throws IOException {
    int prefixLength = prefix.length();
    Terms terms = MultiFields.getTerms(reader, fieldName);
    if (terms == null) {
        return; // field not present in this reader
    }
    Matcher matcher = pattern.matcher("");
    try {
        TermsEnum termsEnum = terms.iterator(null);
        TermsEnum.SeekStatus status = termsEnum.seekCeil(prefixRef);
        BytesRef text;
        if (status == TermsEnum.SeekStatus.FOUND) {
            text = prefixRef;
        } else if (status == TermsEnum.SeekStatus.NOT_FOUND) {
            text = termsEnum.term();
        } else {
            text = null; // END: no term at or after the prefix
        }
        // iterate while terms still share the prefix
        while (text != null && StringHelper.startsWith(text, prefixRef)) {
            String textString = text.utf8ToString();
            // the pattern applies only to the part after the prefix
            matcher.reset(textString.substring(prefixLength));
            if (matcher.matches()) {
                mtv.visitMatchingTerm(new Term(fieldName, textString));
            }
            text = termsEnum.next();
        }
    } finally {
        matcher.reset();
    }
}
/**
 * Covers extraction from a BooleanQuery whose MUST clauses include an
 * unsupported query (a TermRangeQuery).
 * Scenario 1: one supported term + unsupported -> result not verified, the
 * supported term is extracted.
 * Scenario 2: two supported terms + unsupported -> not verified, and the
 * longer term ("_longer_term") is the one extracted.
 * Scenario 3: only unsupported clauses -> UnsupportedQueryException whose
 * payload is that same query instance.
 */
public void testExtractQueryMetadata_unsupportedQueryInBoolQueryWithMustClauses() { TermRangeQuery unsupportedQuery = new TermRangeQuery("_field", null, null, true, false); TermQuery termQuery1 = new TermQuery(new Term("_field", "_term")); BooleanQuery.Builder builder = new BooleanQuery.Builder(); builder.add(termQuery1, BooleanClause.Occur.MUST); builder.add(unsupportedQuery, BooleanClause.Occur.MUST); BooleanQuery bq1 = builder.build(); Result result = analyze(bq1); assertThat(result.verified, is(false)); assertTermsEqual(result.terms, termQuery1.getTerm()); TermQuery termQuery2 = new TermQuery(new Term("_field", "_longer_term")); builder = new BooleanQuery.Builder(); builder.add(termQuery1, BooleanClause.Occur.MUST); builder.add(termQuery2, BooleanClause.Occur.MUST); builder.add(unsupportedQuery, BooleanClause.Occur.MUST); bq1 = builder.build(); result = analyze(bq1); assertThat(result.verified, is(false)); assertTermsEqual(result.terms, termQuery2.getTerm()); builder = new BooleanQuery.Builder(); builder.add(unsupportedQuery, BooleanClause.Occur.MUST); builder.add(unsupportedQuery, BooleanClause.Occur.MUST); BooleanQuery bq2 = builder.build(); UnsupportedQueryException e = expectThrows(UnsupportedQueryException.class, () -> analyze(bq2)); assertThat(e.getUnsupportedQuery(), sameInstance(unsupportedQuery)); }
/**
 * Parses "first foo-bar-foobar* last" for every default operator with wildcard
 * analysis enabled and a repeating (synonym-emitting) analyzer. The expected
 * query duplicates each analyzed token into a two-term SynonymQuery, and
 * expands the trailing wildcard token into two SHOULD PrefixQuery clauses
 * inside a coord-disabled BooleanQuery, all combined with the operator under
 * test.
 */
public void testToQueryWilcardQueryWithSynonyms() throws Exception { assumeTrue("test runs only when at least a type is registered", getCurrentTypes().length > 0); for (Operator op : Operator.values()) { BooleanClause.Occur defaultOp = op.toBooleanClauseOccur(); MapperQueryParser queryParser = new MapperQueryParser(createShardContext()); QueryParserSettings settings = new QueryParserSettings("first foo-bar-foobar* last"); settings.defaultField(STRING_FIELD_NAME); settings.fieldsAndWeights(Collections.emptyMap()); settings.analyzeWildcard(true); settings.fuzziness(Fuzziness.AUTO); settings.rewriteMethod(MultiTermQuery.CONSTANT_SCORE_REWRITE); settings.defaultOperator(op.toQueryParserOperator()); settings.forceAnalyzer(new MockRepeatAnalyzer()); queryParser.reset(settings); Query query = queryParser.parse("first foo-bar-foobar* last"); Query expectedQuery = new BooleanQuery.Builder() .add(new BooleanClause(new SynonymQuery(new Term(STRING_FIELD_NAME, "first"), new Term(STRING_FIELD_NAME, "first")), defaultOp)) .add(new BooleanQuery.Builder() .add(new BooleanClause(new SynonymQuery(new Term(STRING_FIELD_NAME, "foo"), new Term(STRING_FIELD_NAME, "foo")), defaultOp)) .add(new BooleanClause(new SynonymQuery(new Term(STRING_FIELD_NAME, "bar"), new Term(STRING_FIELD_NAME, "bar")), defaultOp)) .add(new BooleanQuery.Builder() .add(new BooleanClause(new PrefixQuery(new Term(STRING_FIELD_NAME, "foobar")), BooleanClause.Occur.SHOULD)) .add(new BooleanClause(new PrefixQuery(new Term(STRING_FIELD_NAME, "foobar")), BooleanClause.Occur.SHOULD)) .setDisableCoord(true) .build(), defaultOp) .build(), defaultOp) .add(new BooleanClause(new SynonymQuery(new Term(STRING_FIELD_NAME, "last"), new Term(STRING_FIELD_NAME, "last")), defaultOp)) .build(); assertThat(query, Matchers.equalTo(expectedQuery)); } }
/**
 * Deserializes term statistics (term -> TermStatistics), then field statistics
 * and maxDoc. The read order must mirror the corresponding writeTo.
 */
@Override
public void readFrom(StreamInput in) throws IOException {
    final int numEntries = in.readVInt();
    termStatistics = HppcMaps.newMap(numEntries);
    for (int i = 0; i < numEntries; i++) {
        // key first (field + bytes), then the stats payload, in stream order
        final Term term = new Term(in.readString(), in.readBytesRef());
        final TermStatistics stats = new TermStatistics(in.readBytesRef(),
                in.readVLong(), DfsSearchResult.subOne(in.readVLong()));
        termStatistics.put(term, stats);
    }
    fieldStatistics = DfsSearchResult.readFieldStats(in);
    maxDoc = in.readVLong();
}
/**
 * Builds a PrefixQuery for this field, applying the caller-supplied rewrite
 * method when one is given. Fails fast if the field is not indexed.
 */
@Override
public final Query prefixQuery(String value, MultiTermQuery.RewriteMethod method, QueryShardContext context) {
    failIfNotIndexed();
    final PrefixQuery prefixQuery = new PrefixQuery(new Term(name(), indexedValueForSearch(value)));
    if (method != null) {
        prefixQuery.setRewriteMethod(method);
    }
    return prefixQuery;
}
/**
 * No-op implementation: the query parameters, convertors and selector are all
 * ignored and {@code result} is never populated — the method only honors
 * cancellation/blocking via {@code await(cancel)}.
 * NOTE(review): presumably a stub or empty-index implementation — confirm
 * against the enclosing class.
 */
@Override public <S, T> void queryDocTerms( Map<? super T, Set<S>> result, Convertor<? super org.apache.lucene.document.Document, T> convertor, Convertor<? super Term, S> termConvertor, FieldSelector selector, AtomicBoolean cancel, Query... queries) throws IOException, InterruptedException { await(cancel); }
/**
 * Maps each term through the convertor and collects the results into a new
 * set.
 */
private static <T> Set<T> convertTerms(final Convertor<? super Term, T> convertor, final Set<? extends Term> terms) {
    final Set<T> converted = new HashSet<T>(terms.size());
    for (final Term t : terms) {
        converted.add(convertor.convert(t));
    }
    return converted;
}
/**
 * Depth-first walk over per-level term bitsets that builds a matrix of value
 * combinations. At each level, terms whose bitset does not intersect the
 * documents accumulated so far are pruned; otherwise the term's text is
 * recorded at {@code curValues[index]} and the document set is narrowed by
 * AND-ing the term's bits. On the last level a MatrixEntry is emitted with the
 * value combination and either the matching item ids or (countOnly) just the
 * cardinality of the document set.
 */
private void recurseTerms(List<List<TermBitSet>> bitSets, int index, String[] curValues, OpenBitSet curBits, MatrixResults results, IndexReader reader, boolean countOnly) { List<TermBitSet> termBitSetList = bitSets.get(index); boolean last = index == curValues.length - 1; for( TermBitSet termBitSet : termBitSetList ) { OpenBitSet termBits = termBitSet.bitSet; Term term = termBitSet.term; // if we don't intersect there's no point in recursing further in if( curBits.intersects(termBits) ) { // Collect current term's value into the value array curValues[index] = term.text(); OpenBitSet docBits = (OpenBitSet) curBits.clone(); docBits.and(termBits); if( last ) { int count; List<ItemIdKey> ids = null; ArrayList<String> vals = new ArrayList<String>(Arrays.asList(curValues)); if( !countOnly ) { ids = getIdsForBitset(docBits, reader); count = ids.size(); } else { count = (int) docBits.cardinality(); } results.addEntry(new MatrixResults.MatrixEntry(vals, ids, count)); } else { recurseTerms(bitSets, index + 1, curValues, docBits, results, reader, countOnly); } } } }
/**
 * Concurrency-test worker: blocks on the shared start latch, then writes
 * {@code opsPerThread} random translog operations — INDEX/CREATE (random
 * payload), DELETE (by _uid term) or NO_OP — recording each operation and the
 * location returned by {@code add(op)} into {@code writtenOperations}, calling
 * {@code afterAdd()} after each. Any exception is captured into
 * {@code threadExceptions[threadId]} instead of propagating, so the test
 * harness can assert on per-thread failures.
 */
@Override public void run() { try { downLatch.await(); for (int opCount = 0; opCount < opsPerThread; opCount++) { Translog.Operation op; final Translog.Operation.Type type = randomFrom(Translog.Operation.Type.values()); switch (type) { case CREATE: case INDEX: op = new Translog.Index("test", threadId + "_" + opCount, randomUnicodeOfLengthBetween(1, 20 * 1024).getBytes("UTF-8")); break; case DELETE: op = new Translog.Delete( new Term("_uid", threadId + "_" + opCount), opCount, 0, 1 + randomInt(100000), randomFrom(VersionType.values())); break; case NO_OP: op = new Translog.NoOp(randomNonNegativeLong(), randomNonNegativeLong(), randomAsciiOfLength(16)); break; default: throw new AssertionError("unsupported operation type [" + type + "]"); } Translog.Location loc = add(op); writtenOperations.add(new LocationOperation(op, loc)); afterAdd(); } } catch (Exception t) { threadExceptions[threadId] = t; } }
/**
 * Adds the expansions of the last (partial) term to the phrase query; when no
 * expansion exists, falls back to the literal term itself unless it is empty.
 */
private void addExpandedTerms(IndexReader reader, MultiPhraseQuery query, String lastTerm, int count) throws IOException {
    final Term[] expansions = expand(reader, FreeTextQuery.FIELD_NAME_AUTOCOMPLETE, lastTerm);
    if (expansions.length > 0) {
        query.add(expansions, count);
    } else if (!lastTerm.isEmpty()) {
        // no expansion found: use the raw term as-is
        final Term literal = new Term(FreeTextQuery.FIELD_NAME_AUTOCOMPLETE, lastTerm);
        query.add(new Term[]{literal}, count);
    }
}
/**
 * Builds a FuzzyQuery whose edit distance is derived from the legacy
 * similarity float and the term's code-point length, then applies the
 * configured rewrite method.
 */
@Override
protected Query newFuzzyQuery(Term term, float minimumSimilarity, int prefixLength) {
    final String termText = term.text();
    // translate the legacy similarity value into an explicit edit count
    final int maxEdits = FuzzyQuery.floatToEdits(
            minimumSimilarity, termText.codePointCount(0, termText.length()));
    final FuzzyQuery fuzzy = new FuzzyQuery(term, maxEdits, prefixLength,
            settings.fuzzyMaxExpansions(), FuzzyQuery.defaultTranspositions);
    QueryParsers.setRewriteMethod(fuzzy, settings.fuzzyRewriteMethod());
    return fuzzy;
}
/**
 * With a zero min-score threshold, both function-score wrappers must leave
 * every hit's score identical to the wrapped query's.
 */
public void testMinScoreAllIncluded() throws Exception {
    final Term term = randomTerm();
    final Query base = new TermQuery(term);
    assertSameScores(base,
            new FunctionScoreQuery(base, null, 0f, null, Float.POSITIVE_INFINITY));
    assertSameScores(base,
            new FiltersFunctionScoreQuery(base, ScoreMode.SUM, new FilterFunction[0],
                    Float.POSITIVE_INFINITY, 0f, CombineFunction.MULTIPLY));
}
/**
 * A field pattern ("mapped_str*") with dismax disabled must expand into a
 * BooleanQuery with one TermQuery clause per matching mapped field.
 */
public void testToQueryFieldsWildcard() throws Exception {
    assumeTrue("test runs only when at least a type is registered", getCurrentTypes().length > 0);
    Query parsed = queryStringQuery("test").field("mapped_str*").useDisMax(false)
            .toQuery(createShardContext());
    assertThat(parsed, instanceOf(BooleanQuery.class));
    BooleanQuery boolQuery = (BooleanQuery) parsed;
    assertThat(boolQuery.clauses().size(), equalTo(2));
    assertThat(assertBooleanSubQuery(parsed, TermQuery.class, 0).getTerm(),
            equalTo(new Term(STRING_FIELD_NAME, "test")));
    assertThat(assertBooleanSubQuery(parsed, TermQuery.class, 1).getTerm(),
            equalTo(new Term(STRING_FIELD_NAME_2, "test")));
}
/**
 * Span term queries against metadata fields (_type, _all) must resolve to a
 * plain SpanTermQuery on that field.
 */
public void testWithMetaDataField() throws IOException {
    QueryShardContext context = createShardContext();
    for (String metaField : new String[]{"_type", "_all"}) {
        SpanTermQueryBuilder builder = new SpanTermQueryBuilder(metaField, "toto");
        Query actual = builder.toQuery(context);
        assertEquals(new SpanTermQuery(new Term(metaField, "toto")), actual);
    }
}
/**
 * Updates (or creates) the index document for the given user, keyed by the
 * user id. (Javadoc translated from Chinese: "update blog index".)
 * Fix: the writer is now closed in a finally block, so the index write lock
 * is released even when building or updating the document throws.
 *
 * @param user the user whose index entry should be refreshed
 * @throws Exception if the writer cannot be obtained, updated or closed
 */
public void updateIndex(UUser user) throws Exception {
    IndexWriter writer = getWriter();
    try {
        Document doc = new Document();
        doc.add(new StringField("userid", String.valueOf(user.getId()), Field.Store.YES));
        doc.add(new TextField("username", user.getUsername(), Field.Store.YES));
        doc.add(new TextField("description", user.getDescription(), Field.Store.YES));
        writer.updateDocument(new Term("userid", String.valueOf(user.getId())), doc);
    } finally {
        writer.close();
    }
}
/**
 * Runs a nested-sort search: all parent documents (constant-scored), ordered
 * on field "f" through a nested comparator built from the parent/child
 * "__type" filters.
 */
private TopDocs getTopDocs(IndexSearcher searcher, IndexFieldData<?> indexFieldData,
        String missingValue, MultiValueMode sortMode, int n, boolean reverse) throws IOException {
    final Query parentFilter = new TermQuery(new Term("__type", "parent"));
    final Query childFilter = new TermQuery(new Term("__type", "child"));
    final XFieldComparatorSource comparatorSource = indexFieldData.comparatorSource(
            missingValue, sortMode, createNested(searcher, parentFilter, childFilter));
    final Sort sort = new Sort(new SortField("f", comparatorSource, reverse));
    return searcher.search(new ConstantScoreQuery(parentFilter), n, sort);
}
/**
 * Adds every term to the delegate collection.
 * Fix: the original assigned {@code result = delegate.add(term)} on every
 * iteration, so the return value reflected only the LAST add. Per the
 * {@link java.util.Collection#addAll} contract the method must return true if
 * ANY add changed the collection — the results are now OR-accumulated.
 *
 * @param terms the terms to add
 * @return true if the delegate changed as a result of this call
 */
@Override
public boolean addAll(Collection<? extends Term> terms) {
    boolean changed = false;
    for (Term term : terms) {
        changed |= delegate.add(term);
    }
    return changed;
}
/** Add multiple terms at the next position in the phrase. Any of the terms
 * may match.
 *
 * @see PhraseQuery#add(Term)
 */
public void add(Term[] terms) {
    // next position = one past the last recorded position (0 when empty)
    int nextPosition = 0;
    if (!positions.isEmpty()) {
        nextPosition = positions.get(positions.size() - 1).intValue() + 1;
    }
    add(terms, nextPosition);
}
/**
 * Seeks the enumerator to the term, consulting the terms cache first when
 * caching is enabled (the cache key is a defensive deep copy of the term).
 */
TermInfo seekEnum(SegmentTermEnum enumerator, Term term, boolean useCache) throws IOException {
    final TermInfo cached = useCache
            ? termsCache.get(new CloneableTerm(deepCopyOf(term)))
            : null;
    return seekEnum(enumerator, term, cached, useCache);
}
/**
 * Parses "first foo-bar-foobar* last" for every default operator with
 * wildcard analysis enabled and a repeating (synonym-emitting) analyzer. The
 * expected query duplicates each analyzed token into a two-term SynonymQuery
 * on "field1" and expands the trailing wildcard token into two SHOULD
 * PrefixQuery clauses inside a coord-disabled BooleanQuery, all combined with
 * the operator under test.
 */
public void testAnalyzerWildcardWithSynonyms() { SimpleQueryParser.Settings settings = new SimpleQueryParser.Settings(); settings.analyzeWildcard(true); Map<String, Float> weights = new HashMap<>(); weights.put("field1", 1.0f); SimpleQueryParser parser = new MockSimpleQueryParser(new MockRepeatAnalyzer(), weights, -1, settings); for (Operator op : Operator.values()) { BooleanClause.Occur defaultOp = op.toBooleanClauseOccur(); parser.setDefaultOperator(defaultOp); Query query = parser.parse("first foo-bar-foobar* last"); Query expectedQuery = new BooleanQuery.Builder() .add(new BooleanClause(new SynonymQuery(new Term("field1", "first"), new Term("field1", "first")), defaultOp)) .add(new BooleanQuery.Builder() .add(new BooleanClause(new SynonymQuery(new Term("field1", "foo"), new Term("field1", "foo")), defaultOp)) .add(new BooleanClause(new SynonymQuery(new Term("field1", "bar"), new Term("field1", "bar")), defaultOp)) .add(new BooleanQuery.Builder() .add(new BooleanClause(new PrefixQuery(new Term("field1", "foobar")), BooleanClause.Occur.SHOULD)) .add(new BooleanClause(new PrefixQuery(new Term("field1", "foobar")), BooleanClause.Occur.SHOULD)) .setDisableCoord(true) .build(), defaultOp) .build(), defaultOp) .add(new BooleanClause(new SynonymQuery(new Term("field1", "last"), new Term("field1", "last")), defaultOp)) .build(); assertThat(query, equalTo(expectedQuery)); } }
/**
 * Lets the mapped field type override term query construction; falls back to
 * the default parser behavior when no field type is set or it declines.
 */
@Override
protected Query newTermQuery(Term term, TermContext context) {
    if (fieldType != null) {
        final Query fieldQuery = fieldType.queryStringTermQuery(term);
        if (fieldQuery != null) {
            return fieldQuery;
        }
    }
    return super.newTermQuery(term, context);
}
/** * Deletes the given {@link IIndexElement} from the index. * @param element the {@link IIndexElement} to remove. * @throws IOException if an error occurred in the index. */ private void delete(final IIndexElement element) throws IOException { final IIndexTypeConf conf = indexData.getConf(); // build query for deletion final BooleanQuery query = new BooleanQuery(); query.add(new TermQuery(new Term(IIndexElement.FIELD_ID, element.getId())), Occur.MUST); query.add(new TermQuery(new Term(IIndexElement.FIELD_INDEX_TYPE, conf.getName())), Occur.MUST); index.deleteDocuments(query); }
/**
 * Explains how a single document scores against the request query on one
 * shard. Looks the document up by uid term; returns exists=false when absent.
 * Otherwise parses the query, asks the searcher for an explanation of the
 * segment-adjusted doc id, lets every configured rescorer refine that
 * explanation, and — when stored fields or _source were requested — fetches
 * them through the SAME engine GetResult (see the inline comment on why).
 * The engine result and search context are always released in the finally
 * block.
 */
@Override protected ExplainResponse shardOperation(ExplainRequest request, ShardId shardId) throws IOException { ShardSearchLocalRequest shardSearchLocalRequest = new ShardSearchLocalRequest(shardId, new String[]{request.type()}, request.nowInMillis, request.filteringAlias()); SearchContext context = searchService.createSearchContext(shardSearchLocalRequest, SearchService.NO_TIMEOUT, null); Term uidTerm = new Term(UidFieldMapper.NAME, Uid.createUidAsBytes(request.type(), request.id())); Engine.GetResult result = null; try { result = context.indexShard().get(new Engine.Get(false, uidTerm)); if (!result.exists()) { return new ExplainResponse(shardId.getIndexName(), request.type(), request.id(), false); } context.parsedQuery(context.getQueryShardContext().toQuery(request.query())); context.preProcess(true); int topLevelDocId = result.docIdAndVersion().docId + result.docIdAndVersion().context.docBase; Explanation explanation = context.searcher().explain(context.query(), topLevelDocId); for (RescoreSearchContext ctx : context.rescore()) { Rescorer rescorer = ctx.rescorer(); explanation = rescorer.explain(topLevelDocId, context, ctx, explanation); } if (request.storedFields() != null || (request.fetchSourceContext() != null && request.fetchSourceContext().fetchSource())) { // Advantage is that we're not opening a second searcher to retrieve the _source. Also // because we are working in the same searcher in engineGetResult we can be sure that a // doc isn't deleted between the initial get and this call. 
GetResult getResult = context.indexShard().getService().get(result, request.id(), request.type(), request.storedFields(), request.fetchSourceContext()); return new ExplainResponse(shardId.getIndexName(), request.type(), request.id(), true, explanation, getResult); } else { return new ExplainResponse(shardId.getIndexName(), request.type(), request.id(), true, explanation); } } catch (IOException e) { throw new ElasticsearchException("Could not explain", e); } finally { Releasables.close(result, context); } }
/**
 * Lets the current mapped field type override term query construction; falls
 * back to the default parser behavior when no field type is set or it
 * declines.
 */
@Override
protected Query newTermQuery(Term term) {
    if (currentFieldType == null) {
        return super.newTermQuery(term);
    }
    final Query fieldQuery = currentFieldType.queryStringTermQuery(term);
    return fieldQuery != null ? fieldQuery : super.newTermQuery(term);
}