/**
 * Return the non-redundant timestamps of all graph element events.
 *
 * @return TreeSet<Long> timestamps
 */
public TreeSet<Long> getTimestamps() {
    TreeSet<Long> timestampSet = new TreeSet<Long>();
    Function<BsonDateTime, Long> mapper = new Function<BsonDateTime, Long>() {
        @Override
        public Long apply(BsonDateTime val) {
            return val.getValue();
        }
    };
    edgeEvents.distinct(Tokens.TIMESTAMP, BsonDateTime.class)
            .filter(new BsonDocument(Tokens.TIMESTAMP,
                    new BsonDocument(Tokens.FC.$ne.toString(), new BsonNull())))
            .map(mapper).into(timestampSet);
    Set<Long> vtSet = new TreeSet<Long>();
    vertexEvents.distinct(Tokens.TIMESTAMP, BsonDateTime.class)
            .filter(new BsonDocument(Tokens.TIMESTAMP,
                    new BsonDocument(Tokens.FC.$ne.toString(), new BsonNull())))
            .map(mapper).into(vtSet);
    timestampSet.addAll(vtSet);
    return timestampSet;
}
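// A minimal usage sketch, assuming "g" is an instance of the enclosing graph class (the
// variable name is hypothetical). The returned TreeSet is sorted, so first()/last() give the
// bounds of the event timeline in epoch milliseconds:
//
//     TreeSet<Long> ts = g.getTimestamps();
//     if (!ts.isEmpty())
//         System.out.println(new java.util.Date(ts.first()) + " ~ " + new java.util.Date(ts.last()));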
/**
 * Return the non-redundant timestamps of all graph element events that fall strictly between
 * startTime and endTime (both bounds exclusive, matching the $gt/$lt filters below).
 *
 * @return TreeSet<Long> timestamps
 */
public TreeSet<Long> getTimestamps(Long startTime, Long endTime) {
    TreeSet<Long> timestampSet = new TreeSet<Long>();
    Function<BsonDateTime, Long> mapper = new Function<BsonDateTime, Long>() {
        @Override
        public Long apply(BsonDateTime val) {
            return val.getValue();
        }
    };
    edgeEvents.distinct(Tokens.TIMESTAMP, BsonDateTime.class)
            .filter(new BsonDocument(Tokens.TIMESTAMP,
                    new BsonDocument(Tokens.FC.$gt.toString(), new BsonDateTime(startTime))
                            .append(Tokens.FC.$lt.toString(), new BsonDateTime(endTime))))
            .map(mapper).into(timestampSet);
    Set<Long> vtSet = new TreeSet<Long>();
    vertexEvents.distinct(Tokens.TIMESTAMP, BsonDateTime.class)
            .filter(new BsonDocument(Tokens.TIMESTAMP,
                    new BsonDocument(Tokens.FC.$gt.toString(), new BsonDateTime(startTime))
                            .append(Tokens.FC.$lt.toString(), new BsonDateTime(endTime))))
            .map(mapper).into(vtSet);
    timestampSet.addAll(vtSet);
    return timestampSet;
}
/**
 * Return the non-redundant edge timestamps as a HashSet.
 *
 * @return HashSet<Long> timestamps
 */
public HashSet<Long> getTimestampsHashSet() {
    HashSet<Long> timestampSet = new HashSet<Long>();
    Function<BsonDateTime, Long> mapper = new Function<BsonDateTime, Long>() {
        @Override
        public Long apply(BsonDateTime val) {
            return val.getValue();
        }
    };
    edges.distinct(Tokens.TIMESTAMP, BsonDateTime.class)
            .filter(new BsonDocument(Tokens.TIMESTAMP,
                    new BsonDocument(Tokens.FC.$ne.toString(), new BsonNull())))
            .map(mapper).into(timestampSet);
    return timestampSet;
}
public ProfiledMapMongotIterable(MongoIterable<U> map, Function<TResult, U> mapper,
        ProfiledCursorCreator creator) {
    super();
    this.mongoIterable = map;
    this.mapper = mapper;
    this.creator = creator;
}
@Test
public void testAggregate() {
    List<Document> docList = new ArrayList<Document>();
    coll.aggregate(Arrays.asList(Aggregates.match(Filters.eq("name", "Alto")),
            Aggregates.group("color", Accumulators.sum("count", 1)))).into(docList);
    assertEquals(1, docList.size());
    docList.clear();

    Document first = coll
            .aggregate(Arrays.asList(Aggregates.match(Filters.eq("name", "Alto")),
                    Aggregates.group("color", Accumulators.sum("count", 1))), Document.class)
            .allowDiskUse(true).batchSize(12).bypassDocumentValidation(true)
            .collation(Collation.builder().build()).first();
    Assert.assertNotNull(first);

    first = coll
            .aggregate(Arrays.asList(Aggregates.match(Filters.eq("name", "Alto")),
                    Aggregates.group("color", Accumulators.sum("count", 1))), Document.class)
            .allowDiskUse(true).batchSize(12).bypassDocumentValidation(true)
            .collation(Collation.builder().build()).map(new Function<Document, Document>() {
                @Override
                public Document apply(Document t) {
                    t.put("hello", "world");
                    return t;
                }
            }).first();
    Assert.assertNotNull(first);
}
public MongoIterable<String> createMap() {
    return coll.find(Filters.eq("name", "Alto")).map(new Function<Document, String>() {
        @Override
        public String apply(Document t) {
            return t.getString("name");
        }
    });
}
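// MongoIterable extends Iterable, so the mapped iterable above can be consumed lazily with a
// plain for-each loop; printNames is a hypothetical helper for illustration only.
public void printNames() {
    for (String name : createMap()) {
        System.out.println(name); // each element is the "name" field extracted in createMap()
    }
}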
private <T> PaginatedCollection<T> wildcardSearch(Bson filter, String collection,
        Function<Document, T> mapper) {
    return RestHelper.paginatedList(
            database.getCollection(collection)
                    .find(filter)
                    .map(mapper));
}
private <T> PaginatedCollection<T> textSearch(String query, String collection,
        Function<Document, T> mapper) {
    return RestHelper.paginatedList(
            database.getCollection(collection)
                    .find(Filters.text(query))
                    .projection(Projections.metaTextScore("score"))
                    .sort(Sorts.metaTextScore("score"))
                    .map(mapper));
}
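// A hedged example call site for textSearch(): querying a hypothetical "articles" collection and
// mapping each result Document to its "title" field. The collection and field names are
// illustrative assumptions; only the helper above is taken from the surrounding class.
private PaginatedCollection<String> searchArticleTitles(String query) {
    return textSearch(query, "articles", d -> d.getString("title"));
}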
/**
 * Return an iterable to all the vertices in the graph. If this is not possible
 * for the implementation, then an UnsupportedOperationException can be thrown.
 *
 * @return an iterable reference to all vertices in the graph
 */
public Iterable<ChronoVertex> getChronoVertices() {
    HashSet<String> idSet = new HashSet<String>();
    Function<BsonString, String> mapper = new Function<BsonString, String>() {
        @Override
        public String apply(BsonString val) {
            return val.getValue();
        }
    };
    HashSet<String> outV = new HashSet<String>();
    edges.distinct(Tokens.OUT_VERTEX, BsonString.class)
            .filter(new BsonDocument(Tokens.OUT_VERTEX,
                    new BsonDocument(Tokens.FC.$ne.toString(), new BsonNull())))
            .map(mapper).into(outV);
    idSet.addAll(outV);
    HashSet<String> inV = new HashSet<String>();
    edges.distinct(Tokens.IN_VERTEX, BsonString.class)
            .filter(new BsonDocument(Tokens.IN_VERTEX,
                    new BsonDocument(Tokens.FC.$ne.toString(), new BsonNull())))
            .map(mapper).into(inV);
    idSet.addAll(inV);
    MongoCursor<BsonDocument> vi = vertices.find(Tokens.FLT_VERTEX_FIELD_NOT_INCLUDED)
            .projection(Tokens.PRJ_ONLY_ID).iterator();
    while (vi.hasNext()) {
        BsonDocument d = vi.next();
        idSet.add(d.getString(Tokens.ID).getValue());
    }
    HashSet<String> vertex = new HashSet<String>();
    vertices.distinct(Tokens.VERTEX, BsonString.class)
            .filter(new BsonDocument(Tokens.VERTEX,
                    new BsonDocument(Tokens.FC.$ne.toString(), new BsonNull())))
            .map(mapper).into(vertex);
    idSet.addAll(vertex);
    return idSet.parallelStream().map(s -> new ChronoVertex(s, this)).collect(Collectors.toSet());
}
/**
 * Return the set of all the vertices in the graph. If this is not possible for
 * the implementation, then an UnsupportedOperationException can be thrown.
 *
 * @return a set of all vertices in the graph
 */
public Set<ChronoVertex> getChronoVertexSet() {
    HashSet<String> idSet = new HashSet<String>();
    Function<BsonDocument, String> mapper = new Function<BsonDocument, String>() {
        @Override
        public String apply(BsonDocument val) {
            return val.getString(Tokens.ID).getValue();
        }
    };
    HashSet<String> outV = new HashSet<String>();
    ArrayList<BsonDocument> outVQuery = new ArrayList<BsonDocument>();
    outVQuery.add(new BsonDocument("$group",
            new BsonDocument(Tokens.ID, new BsonString("$" + Tokens.OUT_VERTEX))));
    edges.aggregate(outVQuery).map(mapper).into(outV);
    idSet.addAll(outV);
    HashSet<String> inV = new HashSet<String>();
    ArrayList<BsonDocument> inVQuery = new ArrayList<BsonDocument>();
    inVQuery.add(new BsonDocument("$group",
            new BsonDocument(Tokens.ID, new BsonString("$" + Tokens.IN_VERTEX))));
    edges.aggregate(inVQuery).map(mapper).into(inV);
    idSet.addAll(inV);
    MongoCursor<BsonDocument> vi = vertices.find(Tokens.FLT_VERTEX_FIELD_NOT_INCLUDED)
            .projection(Tokens.PRJ_ONLY_ID).iterator();
    while (vi.hasNext()) {
        BsonDocument d = vi.next();
        idSet.add(d.getString(Tokens.ID).getValue());
    }
    return idSet.parallelStream().map(s -> new ChronoVertex(s, this)).collect(Collectors.toSet());
}
public Set<ChronoVertex> getOutVertexSet() {
    Function<BsonDocument, String> mapper = new Function<BsonDocument, String>() {
        @Override
        public String apply(BsonDocument val) {
            return val.getString(Tokens.ID).getValue();
        }
    };
    HashSet<String> outV = new HashSet<String>();
    ArrayList<BsonDocument> outVQuery = new ArrayList<BsonDocument>();
    outVQuery.add(new BsonDocument("$group",
            new BsonDocument(Tokens.ID, new BsonString("$" + Tokens.OUT_VERTEX))));
    edges.aggregate(outVQuery).map(mapper).into(outV);
    return outV.parallelStream().map(s -> new ChronoVertex(s, this)).collect(Collectors.toSet());
}
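// Reading aid for the single-stage pipeline built in getOutVertexSet(): assuming
// Tokens.OUT_VERTEX resolves to the "_o" field shown in the shell comment of
// getOutVertexEventSet(), the aggregation is equivalent to
//
//     db.edges.aggregate([ { $group: { _id: "$_o" } } ])
//
// i.e. one result document per distinct out-vertex identifier, carried in "_id".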
/**
 * Return a stream of all the vertices in the graph. If this is not possible for
 * the implementation, then an UnsupportedOperationException can be thrown.
 *
 * @param isParallel whether the returned stream should be parallel
 * @return a stream of all vertices in the graph
 */
public Stream<ChronoVertex> getChronoVertexStream(boolean isParallel) {
    HashSet<String> idSet = new HashSet<String>();
    Function<BsonString, String> mapper = new Function<BsonString, String>() {
        @Override
        public String apply(BsonString val) {
            return val.getValue();
        }
    };
    HashSet<String> outV = new HashSet<String>();
    edges.distinct(Tokens.OUT_VERTEX, BsonString.class)
            .filter(new BsonDocument(Tokens.OUT_VERTEX,
                    new BsonDocument(Tokens.FC.$ne.toString(), new BsonNull())))
            .map(mapper).into(outV);
    idSet.addAll(outV);
    HashSet<String> inV = new HashSet<String>();
    edges.distinct(Tokens.IN_VERTEX, BsonString.class)
            .filter(new BsonDocument(Tokens.IN_VERTEX,
                    new BsonDocument(Tokens.FC.$ne.toString(), new BsonNull())))
            .map(mapper).into(inV);
    idSet.addAll(inV);
    MongoCursor<BsonDocument> vi = vertices.find(Tokens.FLT_VERTEX_FIELD_NOT_INCLUDED)
            .projection(Tokens.PRJ_ONLY_ID).iterator();
    while (vi.hasNext()) {
        BsonDocument d = vi.next();
        idSet.add(d.getString(Tokens.ID).getValue());
    }
    HashSet<String> vertex = new HashSet<String>();
    vertices.distinct(Tokens.VERTEX, BsonString.class)
            .filter(new BsonDocument(Tokens.VERTEX,
                    new BsonDocument(Tokens.FC.$ne.toString(), new BsonNull())))
            .map(mapper).into(vertex);
    idSet.addAll(vertex);
    Set<ChronoVertex> vertexSet = idSet.parallelStream().map(s -> new ChronoVertex(s, this))
            .collect(Collectors.toSet());
    return isParallel ? vertexSet.parallelStream() : vertexSet.stream();
}
public Set<VertexEvent> getOutVertexEventSet(final AC tt) {
    while (true) {
        try {
            // db.tEdgeEvents.aggregate([{$match:{"_o":"1","_t":{ $gt : ISODate(0) }}},
            //   {$project:{"_i":1,"_t":1,"_id":0}},
            //   {$group:{"_id":"$_i", "_t": {$min: "$_t"}}}])
            BsonDocument match = new BsonDocument("$match",
                    new BsonDocument(Tokens.OUT_VERTEX, new BsonString(vertex.toString()))
                            .append(Tokens.TIMESTAMP,
                                    new BsonDocument("$gt", new BsonDateTime(timestamp))));
            BsonDocument project = new BsonDocument("$project",
                    new BsonDocument(Tokens.IN_VERTEX, new BsonBoolean(true))
                            .append(Tokens.TIMESTAMP, new BsonBoolean(true))
                            .append(Tokens.ID, new BsonBoolean(false)));
            BsonDocument group = new BsonDocument("$group",
                    new BsonDocument(Tokens.ID, new BsonString("$" + Tokens.IN_VERTEX))
                            .append(Tokens.TIMESTAMP,
                                    new BsonDocument("$min", new BsonString("$" + Tokens.TIMESTAMP))));

            ArrayList<BsonDocument> aggregateQuery = new ArrayList<BsonDocument>();
            aggregateQuery.add(match);
            aggregateQuery.add(project);
            aggregateQuery.add(group);

            HashSet<VertexEvent> ret = new HashSet<VertexEvent>();
            Function<BsonDocument, VertexEvent> mapper = new Function<BsonDocument, VertexEvent>() {
                @Override
                public VertexEvent apply(BsonDocument d) {
                    String inV = d.getString(Tokens.ID).getValue();
                    Long t = d.getDateTime(Tokens.TIMESTAMP).getValue();
                    return new VertexEvent(graph, new ChronoVertex(inV, graph), t);
                }
            };
            vertex.graph.getEdgeEvents().aggregate(aggregateQuery).map(mapper).into(ret);
            return ret;
        } catch (MongoCursorNotFoundException e1) {
            // retry the aggregation if the server-side cursor was lost
            System.out.println(e1.getErrorMessage());
        }
    }
}
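// A hedged call-site sketch (variable names are illustrative): starting from a vertex event,
// collect the earliest strictly-later event on each out-neighbour. Note that the pipeline above
// always compares timestamps with $gt, independent of the tt argument.
//
//     Set<VertexEvent> nextHop = vertexEvent.getOutVertexEventSet(tt);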