protected Query makeQueryFromShape(Shape shape) {
  SpatialArgs args = new SpatialArgs(operation, shape);
  if (!Double.isNaN(distErrPct))
    args.setDistErrPct(distErrPct);

  if (score) {
    ValueSource valueSource = strategy.makeDistanceValueSource(shape.getCenter());
    return new CustomScoreQuery(strategy.makeQuery(args), new FunctionQuery(valueSource));
  } else {
    // strategy.makeQuery() could potentially score (isn't well defined) so instead we call
    // makeFilter() and wrap
    Filter filter = strategy.makeFilter(args);
    if (filter instanceof QueryWrapperFilter) {
      return ((QueryWrapperFilter) filter).getQuery();
    } else {
      return new ConstantScoreQuery(filter);
    }
  }
}
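A minimal usage sketch for the method above, assuming a Spatial4j SpatialContext named ctx and an IndexSearcher named indexSearcher; the coordinates, radius, and variable names are illustrative assumptions, not part of the original snippet.

// Hypothetical usage: build a 200 km circle around a point and search with the
// query produced by makeQueryFromShape().
Shape circle = ctx.makeCircle(-80.93, 33.77,
    DistanceUtils.dist2Degrees(200, DistanceUtils.EARTH_MEAN_RADIUS_KM));
Query query = makeQueryFromShape(circle);
TopDocs topDocs = indexSearcher.search(query, 10);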
public void testWithCachingFilter() throws Exception {
  Directory dir = newDirectory();
  RandomIndexWriter writer = new RandomIndexWriter(random(), dir);

  IndexReader reader = writer.getReader();
  writer.close();

  IndexSearcher searcher = newSearcher(reader);

  Query query = new TermQuery(new Term("none", "none"));

  QueryWrapperFilter queryFilter = new QueryWrapperFilter(query);
  CachingWrapperFilter cachingFilter = new CachingWrapperFilter(queryFilter);

  searcher.search(query, cachingFilter, 1);

  CachingWrapperFilter cachingFilter2 = new CachingWrapperFilter(queryFilter);
  Filter[] chain = new Filter[2];
  chain[0] = cachingFilter;
  chain[1] = cachingFilter2;
  ChainedFilter cf = new ChainedFilter(chain);

  // throws java.lang.ClassCastException: org.apache.lucene.util.OpenBitSet cannot be cast to java.util.BitSet
  searcher.search(new MatchAllDocsQuery(), cf, 1);

  reader.close();
  dir.close();
}
public void testQueries() throws Exception {
  int n = atLeast(4);
  for (int i = 0; i < n; i++) {
    Filter odd = new QueryWrapperFilter(new TermQuery(new Term("oddeven", "odd")));
    assertQuery(new MatchAllDocsQuery(), null);
    assertQuery(new TermQuery(new Term("english", "one")), null);
    assertQuery(new MatchAllDocsQuery(), odd);
    assertQuery(new TermQuery(new Term("english", "four")), odd);

    BooleanQuery bq = new BooleanQuery();
    bq.add(new TermQuery(new Term("english", "one")), BooleanClause.Occur.SHOULD);
    bq.add(new TermQuery(new Term("oddeven", "even")), BooleanClause.Occur.SHOULD);
    assertQuery(bq, null);

    // force in order
    bq.add(new TermQuery(new Term("english", "two")), BooleanClause.Occur.SHOULD);
    bq.setMinimumNumberShouldMatch(2);
    assertQuery(bq, null);
  }
}
@org.hibernate.search.annotations.Factory
public Filter buildFilter() {
  QueryParser qp = new QueryParser(Version.LUCENE_CURRENT, field, analyzer);
  Query q;
  try {
    if (allowLuceneSyntax)
      q = qp.parse(value);
    else
      q = qp.parse(QueryParser.escape(value));
  } catch (ParseException e) {
    org.webdsl.logging.Logger.error("Error while parsing query in field filter: ");
    org.webdsl.logging.Logger.error("EXCEPTION", e);
    q = new TermQuery(new Term(field, value));
  }
  Filter filter = new QueryWrapperFilter(q);
  filter = new CachingWrapperFilter(filter);
  return filter;
}
/**
 * Set up the filters for collections - this is for searching within collections.
 *
 * @param collection - to search within
 * @return - created filter
 * @throws ParseException
 */
private List<Filter> getCollectionFilters(InstitutionalCollection collection) throws ParseException {
  List<Filter> filters = new LinkedList<Filter>();

  // isolate the collection root
  Term t = new Term("collection_root_id", NumericUtils.longToPrefixCoded(collection.getTreeRoot().getId()));
  Query subQuery = new TermQuery(t);
  filters.add(new QueryWrapperFilter(subQuery));

  // isolate the range of children
  subQuery = NumericRangeQuery.newLongRange("collection_left_value", collection.getLeftValue(),
      collection.getRightValue(), true, true);
  filters.add(new QueryWrapperFilter(subQuery));

  return filters;
}
/**
 * Execute the sub query facets and return the search results.
 *
 * @throws ParseException
 * @throws IOException
 */
private List<Filter> getSubQueryFilters(List<FacetFilter> filters, IndexSearcher searcher)
    throws ParseException, IOException {
  List<Filter> luceneFilters = new LinkedList<Filter>();

  for (FacetFilter filter : filters) {
    if (log.isDebugEnabled()) {
      log.debug("adding filter for field " + filter.getField() + " and query " + filter.getQuery());
    }
    QueryParser subQueryParser = new QueryParser(Version.LUCENE_35, filter.getField(), analyzer);
    subQueryParser.setDefaultOperator(QueryParser.AND_OPERATOR);
    String fixedQuery = SearchHelper.prepareFacetSearchString(filter.getQuery(), false);
    fixedQuery = "\"" + fixedQuery + "\"";
    Query subQuery = subQueryParser.parse(fixedQuery);
    if (log.isDebugEnabled()) {
      log.debug("sub query in getSubQueryFilters is " + fixedQuery);
    }
    luceneFilters.add(new QueryWrapperFilter(subQuery));
  }

  return luceneFilters;
}
/**
 * Creates a directory filter; also filters a range of pages.
 *
 * @param constrainField The field that contains the directory info
 * @param constrainValues The directories to which the filter should limit results
 * @return The created filter
 */
private Filter getFilter(String constrainField, List<String> constrainValues, int type, int startPage, int endPage) {
  BooleanQuery cqry = new BooleanQuery();
  if (constrainValues.size() == 1) {
    cqry.add(new TermQuery(new Term(constrainField, constrainValues.get(0))), BooleanClause.Occur.MUST);
  } else {
    for (String s : constrainValues) {
      cqry.add(new TermQuery(new Term(constrainField, s)), BooleanClause.Occur.SHOULD);
    }
  }
  if (type == FileSearcher.QUERY_BOOLEAN && startPage != -1 && endPage != -1) {
    cqry.add(NumericRangeQuery.newIntRange("page", startPage, endPage, true, true), BooleanClause.Occur.MUST);
  }
  return new QueryWrapperFilter(cqry);
}
private Filter buildNewFilter(Query query, ConcurrentMap<String, String> filterAlias, FilterParser filterParser)
    throws ParseException {
  if (query instanceof BooleanQuery) {
    BooleanQuery booleanQuery = (BooleanQuery) query;
    BooleanFilter booleanFilter = new BooleanFilter();
    for (BooleanClause clause : booleanQuery.clauses()) {
      booleanFilter.add(buildNewFilter(clause.getQuery(), filterAlias, filterParser), clause.getOccur());
    }
    return booleanFilter;
  } else if (query instanceof TermQuery) {
    TermQuery termQuery = (TermQuery) query;
    Term term = termQuery.getTerm();
    String key = term.toString();
    String queryStr = filterAlias.get(key);
    if (queryStr == null) {
      return new QueryWrapperFilter(termQuery);
    }
    String id = getId(key);
    return new FilterCache(id, new QueryWrapperFilter(filterParser.parse(queryStr)));
  } else {
    return new QueryWrapperFilter(query);
  }
}
@Test
public void testFetchRowByRecordIdWithFilterNoHit() throws Exception {
  IndexManagerTestReadInterceptor.interceptor = new ReadInterceptor(null) {
    @Override
    public Filter getFilter() {
      return new QueryWrapperFilter(new TermQuery(new Term(FAMILY + ".testcol1", "NOHIT")));
    }
  };
  Selector selector = new Selector().setRowId("row-1").setRecordId("record-1").setRecordOnly(true);
  FetchResult fetchResult = new FetchResult();
  indexManager.fetchRow(TABLE, selector, fetchResult);
  assertFalse(fetchResult.deleted);
  assertFalse(fetchResult.exists);
  assertEquals(TABLE, fetchResult.table);
  assertNull(fetchResult.rowResult);
  assertNull(fetchResult.recordResult);
}
@Override
@SuppressWarnings("unchecked")
public Query parse() throws SyntaxError {
  Method method = Method.valueOf(localParams.get(METHOD, Method.termsFilter.name()));
  JoinSpec<T> js = JoinSpec.parse(localParams.get(QueryParsing.V));
  Iterator<T> it = js.iterator(this);
  if (joinField == null) {
    // the method only declares SyntaxError, so throw that rather than a plain Exception
    throw new SyntaxError("No XJoin component referenced by query");
  }
  FieldType ft = req.getSchema().getFieldTypeNoEx(joinField);
  Iterator<BytesRef> bytesRefs = new TransformIterator(it, transformer(ft));
  if (!bytesRefs.hasNext()) {
    return new BooleanQuery.Builder().build(); // matches nothing
  }
  Query query = method.makeQuery(joinField, bytesRefs);
  return new SolrConstantScoreQuery(new QueryWrapperFilter(query));
}
public void testSecurityFilter() throws Exception {
  TermQuery query = new TermQuery(               //#1
      new Term("keywords", "info"));             //#1

  assertEquals("Both documents match",           //#2
      2,                                         //#2
      TestUtil.hitCount(searcher, query));       //#2

  Filter jakeFilter = new QueryWrapperFilter(    //#3
      new TermQuery(new Term("owner", "jake"))); //#3

  TopDocs hits = searcher.search(query, jakeFilter, 10);
  assertEquals(1, hits.totalHits);               //#4
  assertEquals("elwood is safe",                 //#4
      "jake's sensitive info",                   //#4
      searcher.doc(hits.scoreDocs[0].doc)        //#4
          .get("keywords"));                     //#4
}
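For contrast, a rough sketch of the same owner constraint expressed as a required BooleanQuery clause instead of a filter; this is an assumption-labelled alternative, not part of the original test. Unlike the QueryWrapperFilter above, the owner term here participates in scoring and the constraint is not independently cacheable.

// Hypothetical alternative: fold the security constraint into the query itself.
BooleanQuery securedQuery = new BooleanQuery();
securedQuery.add(new TermQuery(new Term("keywords", "info")), BooleanClause.Occur.MUST);
securedQuery.add(new TermQuery(new Term("owner", "jake")), BooleanClause.Occur.MUST);
TopDocs securedHits = searcher.search(securedQuery, 10);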
private static long applyQueryDeletes(Iterable<QueryAndLimit> queriesIter, ReadersAndUpdates rld,
    final SegmentReader reader) throws IOException {
  long delCount = 0;
  final AtomicReaderContext readerContext = reader.getContext();
  boolean any = false;
  for (QueryAndLimit ent : queriesIter) {
    Query query = ent.query;
    int limit = ent.limit;
    final DocIdSet docs = new QueryWrapperFilter(query).getDocIdSet(readerContext, reader.getLiveDocs());
    if (docs != null) {
      final DocIdSetIterator it = docs.iterator();
      if (it != null) {
        while (true) {
          int doc = it.nextDoc();
          if (doc >= limit) {
            break;
          }
          if (!any) {
            rld.initWritableLiveDocs();
            any = true;
          }
          if (rld.delete(doc)) {
            delCount++;
          }
        }
      }
    }
  }
  return delCount;
}
public static Filter getQueryFilter(String query) {
  try {
    WhitespaceAnalyzer sa = new WhitespaceAnalyzer();
    QueryParser p = new QueryParser("contents", sa);
    Query q = p.parse(query);
    Filter filter = new QueryWrapperFilter(q);
    return filter;
  } catch (Exception e) {
    return null;
  }
}
/**
 * Returns a filter that applies a query to a specific field.
 */
public static Filter fieldQuery(final Analyzer analyzer, final String field, final String query) {
  if (StringUtils.isEmpty(query)) {
    return null;
  }
  QueryParser parser = new QueryParser(LuceneUtils.LUCENE_VERSION, field, analyzer);
  try {
    Query q = parser.parse(query);
    return new QueryWrapperFilter(q);
  } catch (ParseException e) {
    throw new QueryParseException();
  }
}
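A minimal usage sketch for this helper, assuming an existing IndexSearcher named searcher and a main Query named mainQuery; the analyzer choice, field name, and query string are illustrative assumptions.

// Hypothetical usage: restrict the main search to documents whose "category"
// field matches the sub-query "books".
Filter categoryFilter = fieldQuery(new StandardAnalyzer(LuceneUtils.LUCENE_VERSION), "category", "books");
if (categoryFilter != null) {
  TopDocs hits = searcher.search(mainQuery, categoryFilter, 10);
}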
private Query spanFilter(SpanQuery query) {
  if (query instanceof SpanNearQuery) {
    return spanNearFilter((SpanNearQuery) query);
  } else if (query instanceof SpanNotQuery) {
    return spanNotFilter((SpanNotQuery) query);
  } else if (query instanceof SpanOrQuery) {
    return spanOrFilter((SpanOrQuery) query);
  } else if (query instanceof SpanTermQuery) {
    return new TermQuery(((SpanTermQuery) query).getTerm());
  } else if (query instanceof SpanMultiTermQueryWrapper) {
    return ((SpanMultiTermQueryWrapper) query).getWrappedQuery();
  } else {
    return new QueryWrapperFilter(query);
  }
}
@Override
public Filter makeFilter(SpatialArgs args) {
  // unwrap the CSQ from makeQuery
  ConstantScoreQuery csq = makeQuery(args);
  Filter filter = csq.getFilter();
  if (filter != null)
    return filter;
  else
    return new QueryWrapperFilter(csq.getQuery());
}
@Before
public void before() throws Exception {
  directory = newDirectory();
  final IndexWriterConfig config = new IndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(random()));
  final IndexWriter indexWriter = new IndexWriter(directory, config);
  for (int i = 0; i < AMOUNT_OF_SEGMENTS; i++) {
    List<Document> segmentDocs = createDocsForSegment(i);
    indexWriter.addDocuments(segmentDocs);
    indexWriter.commit();
  }
  indexReader = DirectoryReader.open(indexWriter, random().nextBoolean());
  indexWriter.close();
  indexSearcher = new IndexSearcher(indexReader);
  parentsFilter = new FixedBitSetCachingWrapperFilter(
      new QueryWrapperFilter(new WildcardQuery(new Term("parent", "*"))));
}
@Override
Filter makeFilter(String fname, BytesRef[] byteRefs) {
  BooleanQuery bq = new BooleanQuery(true);
  for (BytesRef byteRef : byteRefs) {
    bq.add(new TermQuery(new Term(fname, byteRef)), BooleanClause.Occur.SHOULD);
  }
  return new QueryWrapperFilter(bq);
}
/**
 * Determines the number of hits for each facet across the main query.
 *
 * @param facets
 * @param reader
 * @param mainQueryBitSet
 * @throws ParseException
 * @throws IOException
 */
private void processFacetCategory(Collection<FacetResult> facets, IndexReader reader,
    OpenBitSetDISI mainQueryBitSet, IndexSearcher searcher) throws ParseException, IOException {
  for (FacetResult f : facets) {
    long count = 0;
    String searchString = f.getFacetName();
    if (!searchString.trim().equals("")) {
      QueryParser subQueryParser = new QueryParser(Version.LUCENE_35, f.getField(), keywordAnalyzer);
      searchString = "\"" + searchString + "\"";
      Query subQuery = subQueryParser.parse(searchString);
      QueryWrapperFilter subQueryWrapper = new QueryWrapperFilter(subQuery);
      log.debug("Fixed query in process facet category 2 = " + subQuery + " subQueryWrapper = " + subQueryWrapper);
      DocIdSet subQueryBits = subQueryWrapper.getDocIdSet(reader);
      OpenBitSetDISI subQueryBitSet = new OpenBitSetDISI(subQueryBits.iterator(), reader.maxDoc());
      count = getFacetHitCount(mainQueryBitSet, subQueryBitSet);
      log.debug("count = " + count);
    } else {
      log.error("bad search string " + searchString);
    }
    f.setHits(count);
  }
}
/**
 * Execute the sub query facets and return the search results.
 *
 * @throws ParseException
 * @throws IOException
 */
private List<Filter> getSubQueryFilters(List<FacetFilter> filters, IndexSearcher searcher)
    throws ParseException, IOException {
  List<Filter> luceneFilters = new LinkedList<Filter>();

  for (FacetFilter filter : filters) {
    if (log.isDebugEnabled()) {
      log.debug("adding filter for field " + filter.getField() + " and query " + filter.getQuery());
    }
    String fixedQuery = filter.getQuery();
    QueryParser subQueryParser = new QueryParser(Version.LUCENE_35, filter.getField(), keywordAnalyzer);
    fixedQuery = "\"" + fixedQuery + "\"";
    Query subQuery = subQueryParser.parse(fixedQuery);
    if (log.isDebugEnabled()) {
      log.debug("fixed query in get sub query filters 1 is " + subQuery);
    }
    luceneFilters.add(new QueryWrapperFilter(subQuery));
  }

  return luceneFilters;
}
/**
 * Determines the number of hits for each facet across the main query.
 *
 * @param facets
 * @param reader
 * @param mainQueryBits
 * @throws ParseException
 * @throws IOException
 */
private void processFacetCategory(Collection<FacetResult> facets, IndexReader reader, DocIdSet mainQueryBits)
    throws ParseException, IOException {
  for (FacetResult f : facets) {
    QueryParser subQueryParser = new QueryParser(Version.LUCENE_35, f.getField(), analyzer);
    subQueryParser.setDefaultOperator(QueryParser.AND_OPERATOR);
    String fixedQuery = SearchHelper.prepareFacetSearchString(f.getFacetName(), false);
    fixedQuery = "\"" + fixedQuery + "\"";
    Query subQuery = subQueryParser.parse(fixedQuery);
    if (log.isDebugEnabled()) {
      log.debug("Fixed query in processFacetCategory = " + fixedQuery);
    }
    QueryWrapperFilter subQueryWrapper = new QueryWrapperFilter(subQuery);
    DocIdSet subQueryBits = subQueryWrapper.getDocIdSet(reader);
    OpenBitSetDISI mainQueryBitSet = new OpenBitSetDISI(mainQueryBits.iterator(), maxNumberOfMainQueryHits);
    OpenBitSetDISI subQueryBitSet = new OpenBitSetDISI(subQueryBits.iterator(), maxNumberOfMainQueryHits);
    long count = getFacetHitCount(mainQueryBitSet, subQueryBitSet);
    f.setHits(count);
  }
}
private static long applyQueryDeletes(Iterable<QueryAndLimit> queriesIter, ReadersAndLiveDocs rld,
    final SegmentReader reader) throws IOException {
  long delCount = 0;
  final AtomicReaderContext readerContext = reader.getContext();
  boolean any = false;
  for (QueryAndLimit ent : queriesIter) {
    Query query = ent.query;
    int limit = ent.limit;
    final DocIdSet docs = new QueryWrapperFilter(query).getDocIdSet(readerContext, reader.getLiveDocs());
    if (docs != null) {
      final DocIdSetIterator it = docs.iterator();
      if (it != null) {
        while (true) {
          int doc = it.nextDoc();
          if (doc >= limit) {
            break;
          }
          if (!any) {
            rld.initWritableLiveDocs();
            any = true;
          }
          if (rld.delete(doc)) {
            delCount++;
          }
        }
      }
    }
  }
  return delCount;
}
@Override
public Filter storePreFilter(String table, String filterStr, Filter filter, FilterParser filterParser)
    throws ParseException {
  if (filter instanceof QueryWrapperFilter) {
    QueryWrapperFilter queryWrapperFilter = (QueryWrapperFilter) filter;
    Query query = queryWrapperFilter.getQuery();
    Filter newFilter = buildNewFilter(query, _tableAliasFilterMap.get(table), filterParser);
    FilterKey key = new FilterKey(table, filterStr);
    _preFilterCacheMap.put(key, newFilter);
    return newFilter;
  }
  return filter;
}
@Test
public void testFetchRowByRowIdWithFilterNoRow() throws Exception {
  IndexManagerTestReadInterceptor.interceptor = new ReadInterceptor(null) {
    @Override
    public Filter getFilter() {
      return new QueryWrapperFilter(new TermQuery(new Term(FAMILY + ".testcol12", "NOROW-1")));
    }
  };
  Selector selector = new Selector().setRowId("row-6");
  FetchResult fetchResult = new FetchResult();
  indexManager.fetchRow(TABLE, selector, fetchResult);
  assertTrue(fetchResult.exists);
  assertFalse(fetchResult.deleted);
  assertNull(fetchResult.rowResult.row.records);
}
@Test
public void testQuerySuperQueryTrueWithFilter() throws Exception {
  IndexManagerTestReadInterceptor.interceptor = new ReadInterceptor(null) {
    @Override
    public Filter getFilter() {
      return new QueryWrapperFilter(new TermQuery(new Term(FAMILY + ".testcol2", "value2")));
    }
  };
  BlurQuery blurQuery = new BlurQuery();
  blurQuery.query = new Query();
  blurQuery.query.query = "test-family.testcol1:value1";
  blurQuery.query.rowQuery = true;
  blurQuery.query.scoreType = ScoreType.SUPER;
  blurQuery.fetch = 10;
  blurQuery.minimumNumberOfResults = Long.MAX_VALUE;
  blurQuery.maxQueryTime = Long.MAX_VALUE;
  blurQuery.uuid = "1";

  BlurResultIterable iterable = indexManager.query(TABLE, blurQuery, null);
  assertEquals(1, iterable.getTotalResults());

  BlurIterator<BlurResult, BlurException> iterator = iterable.iterator();
  while (iterator.hasNext()) {
    BlurResult result = iterator.next();
    Selector selector = new Selector().setLocationId(result.getLocationId());
    FetchResult fetchResult = new FetchResult();
    indexManager.fetchRow(TABLE, selector, fetchResult);
    assertNotNull(fetchResult.rowResult);
    assertNull(fetchResult.recordResult);
  }

  assertFalse(indexManager.currentQueries(TABLE).isEmpty());
  Thread.sleep(2000); // wait for cleanup to fire
  assertTrue(indexManager.currentQueries(TABLE).isEmpty());
}
@Test
public void test1() throws IOException {
  Filter filter = new QueryWrapperFilter(new TermQuery(new Term("f1", "t1")));
  FilterCache filterCache = new FilterCache("filter1", filter);
  RAMDirectory directory = new RAMDirectory();
  writeDocs(filterCache, directory);

  DirectoryReader reader = DirectoryReader.open(directory);
  IndexSearcher searcher = new IndexSearcher(reader);
  Query query = new TermQuery(new Term("f2", "t2"));

  TopDocs topDocs1 = searcher.search(query, filterCache, 10);
  assertEquals(1, filterCache.getMisses());
  assertEquals(0, filterCache.getHits());
  assertEquals(1, topDocs1.totalHits);

  TopDocs topDocs2 = searcher.search(query, filterCache, 10);
  assertEquals(1, filterCache.getMisses());
  assertEquals(1, filterCache.getHits());
  assertEquals(1, topDocs2.totalHits);

  TopDocs topDocs3 = searcher.search(query, filterCache, 10);
  assertEquals(1, filterCache.getMisses());
  assertEquals(2, filterCache.getHits());
  assertEquals(1, topDocs3.totalHits);
}
@Override
Filter makeFilter(String fname, Iterator<BytesRef> it) {
  BooleanQuery bq = new BooleanQuery(true);
  while (it.hasNext()) {
    bq.add(new TermQuery(new Term(fname, it.next())), BooleanClause.Occur.SHOULD);
  }
  return new QueryWrapperFilter(bq);
}
/**
 * When a @FullTextFilterDef annotation associates this factory class with a given name, and
 * "FullTextQuery.enableFullTextFilter()" is called with that name as its input parameter, then this
 * method is used to return a Filter with the actual filtering logic. It is the @Factory annotation
 * that designates this method as having that responsibility for this factory class.
 */
@Factory
public Filter getFilter() {
  StringTokenizer tokenizer = new StringTokenizer(deviceName.toLowerCase());
  PhraseQuery query = new PhraseQuery();

  while (tokenizer.hasMoreTokens()) {
    // By default, field values were converted to lower-case when indexed by Lucene. So be sure to
    // convert search terms to lower-case in order to make them match.
    Term term = new Term("supportedDevices.name", tokenizer.nextToken().toLowerCase());
    query.add(term);
  }

  Filter filter = new QueryWrapperFilter(query);
  return new CachingWrapperFilter(filter);
}
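For context, a rough sketch of how such a filter factory is typically wired up in Hibernate Search; the entity name, filter name, factory class name, and parameter value are illustrative assumptions, and the factory is assumed to expose a setter for deviceName that setParameter() populates.

// Hypothetical wiring sketch (names are assumptions, not from the snippet above).
@Entity
@Indexed
@FullTextFilterDef(name = "deviceNameFilter", impl = DeviceFilterFactory.class)
public class App {
  // ... indexed fields, including the embedded "supportedDevices" collection
}

// At query time, enable the named filter and pass the parameter the factory reads:
FullTextQuery hibernateQuery = fullTextSession.createFullTextQuery(luceneQuery, App.class);
hibernateQuery.enableFullTextFilter("deviceNameFilter").setParameter("deviceName", "iPhone 3GS");
List<App> results = hibernateQuery.list();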