@Test
public void testNoRecords() throws Exception {
    assertEquals(0, cappedTestCollection.count());
    MockEndpoint mock = getMockEndpoint("mock:test");
    mock.expectedMessageCount(0);
    // create a capped collection with max = 1000
    CreateCollectionOptions collectionOptions = new CreateCollectionOptions()
            .capped(true)
            .sizeInBytes(1000000000)
            .maxDocuments(1000);
    db.createCollection(cappedTestCollectionName, collectionOptions);
    cappedTestCollection = db.getCollection(cappedTestCollectionName, BasicDBObject.class);
    assertEquals(0, cappedTestCollection.count());
    addTestRoutes();
    context.startRoute("tailableCursorConsumer1");
    Thread.sleep(1000);
    mock.assertIsSatisfied();
    context.stopRoute("tailableCursorConsumer1");
}
private void testThousandRecordsWithRouteId(String routeId) throws Exception {
    assertEquals(0, cappedTestCollection.count());
    MockEndpoint mock = getMockEndpoint("mock:test");
    mock.expectedMessageCount(1000);
    // create a capped collection with max = 1000
    db.createCollection(cappedTestCollectionName, new CreateCollectionOptions()
            .capped(true)
            .sizeInBytes(1000000000)
            .maxDocuments(1000));
    cappedTestCollection = db.getCollection(cappedTestCollectionName, BasicDBObject.class);
    for (int i = 0; i < 1000; i++) {
        cappedTestCollection.insertOne(new BasicDBObject("increasing", i).append("string", "value" + i));
    }
    assertEquals(1000, cappedTestCollection.count());
    addTestRoutes();
    context.startRoute(routeId);
    Thread.sleep(1000);
    mock.assertIsSatisfied();
    context.stopRoute(routeId);
}
public static void checkCapped(MongoDatabase database, String collectionName, int size, int maxDocuments, boolean delete) {
    if (Lists.newArrayList(database.listCollectionNames()).contains(collectionName)) {
        log.debug("'{}' collection already exists...", collectionName);
        // check whether it is already capped
        Document command = new Document("collStats", collectionName);
        boolean isCapped = database.runCommand(command, ReadPreference.primary()).getBoolean("capped").booleanValue();
        if (!isCapped) {
            if (delete) {
                database.getCollection(collectionName).drop();
                database.createCollection(collectionName,
                        new CreateCollectionOptions().capped(true).maxDocuments(maxDocuments).sizeInBytes(size));
            } else {
                log.info("'{}' is not capped, converting it...", collectionName);
                command = new Document("convertToCapped", collectionName).append("size", size).append("max", maxDocuments);
                database.runCommand(command, ReadPreference.primary());
            }
        } else {
            log.debug("'{}' collection already capped!", collectionName);
        }
    } else {
        database.createCollection(collectionName,
                new CreateCollectionOptions().capped(true).maxDocuments(maxDocuments).sizeInBytes(size));
    }
}
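A minimal call-site sketch for the helper above, assuming the synchronous driver and a statically imported checkCapped; the connection string, database, and collection names are illustrative assumptions, not part of the original snippet:

import com.mongodb.client.MongoClient;
import com.mongodb.client.MongoClients;
import com.mongodb.client.MongoDatabase;

public class CheckCappedExample {
    public static void main(String[] args) {
        // assumed local deployment; adjust the connection string as needed
        try (MongoClient client = MongoClients.create("mongodb://localhost:27017")) {
            MongoDatabase database = client.getDatabase("test");
            // ensure a capped "eventlog" collection of at most 10 MB / 5000 documents,
            // converting an existing uncapped collection in place (delete = false)
            checkCapped(database, "eventlog", 10 * 1024 * 1024, 5000, false);
        }
    }
}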
private static MongoCollection<Document> getOrCreateMongoCollection(final MongoDatabase database,
        final String collectionName, final boolean isCapped, final Integer sizeInBytes) {
    try {
        LOGGER.debug("Getting collection '{}'...", collectionName);
        // throws IllegalArgumentException if collectionName is invalid
        return database.getCollection(collectionName);
    } catch (final IllegalStateException e) {
        LOGGER.debug("Collection '{}' does not exist.", collectionName);
        final CreateCollectionOptions options = new CreateCollectionOptions()
                .capped(isCapped)
                .sizeInBytes(sizeInBytes);
        LOGGER.debug("Creating collection {} (capped = {}, sizeInBytes = {})", collectionName, isCapped, sizeInBytes);
        database.createCollection(collectionName, options);
        return database.getCollection(collectionName);
    }
}
void process(final MappedClass mc, final Validation validation) {
    if (validation != null) {
        String collectionName = mc.getCollectionName();
        CommandResult result = getDB()
                .command(new BasicDBObject("collMod", collectionName)
                        .append("validator", parse(validation.value()))
                        .append("validationLevel", validation.level().getValue())
                        .append("validationAction", validation.action().getValue()));
        if (!result.ok()) {
            // error code 26 is NamespaceNotFound: the collection does not exist yet,
            // so create it with the validation options instead of modifying it
            if (result.getInt("code") == 26) {
                ValidationOptions options = new ValidationOptions()
                        .validator(parse(validation.value()))
                        .validationLevel(validation.level())
                        .validationAction(validation.action());
                getDatabase().createCollection(collectionName, new CreateCollectionOptions().validationOptions(options));
            } else {
                result.throwOnError();
            }
        }
    }
}
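For context, a hedged sketch of the kind of Morphia entity whose @Validation annotation feeds the process(...) method above; the Contact class and its validator expression are illustrative, not from the original source:

import org.bson.types.ObjectId;
import org.mongodb.morphia.annotations.Entity;
import org.mongodb.morphia.annotations.Id;
import org.mongodb.morphia.annotations.Validation;

// collMod is attempted first; if the collection is missing (error code 26),
// it is created with these validation options instead.
@Entity("contacts")
@Validation("{ age : { $gte : 13 } }")
public class Contact {
    @Id
    private ObjectId id;
    private int age;
}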
public EventQueue(MongoDatabase db, HamsterEngine engine) {
    this.db = db;
    this.engine = engine;
    boolean exists = false;
    for (String name : db.listCollectionNames()) {
        if (name.equals(COLLECTION_NAME)) {
            exists = true;
            break;
        }
    }
    if (!exists) {
        CreateCollectionOptions options = new CreateCollectionOptions();
        options.capped(true);
        options.sizeInBytes(QUEUE_SIZE);
        options.maxDocuments(300);
        db.createCollection(COLLECTION_NAME, options);
    }
    queueCollection = db.getCollection(COLLECTION_NAME);
    pid = UUID.randomUUID().toString();
    Thread queuePoller = new Thread(new Runnable() {
        @Override
        public void run() {
            tailQueue();
        }
    }, "event queue processor");
    engine.addThread(queuePoller);
    queuePoller.start();
}
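The constructor above creates a capped collection because tailable cursors only work on capped collections. The original tailQueue() body is not shown; below is a minimal sketch of what it might do, assuming the synchronous driver, and it is not the project's actual implementation:

import com.mongodb.CursorType;
import com.mongodb.client.MongoCursor;
import org.bson.Document;

private void tailQueue() {
    // TailableAwait blocks server-side, waiting for new documents to arrive
    try (MongoCursor<Document> cursor = queueCollection.find()
            .cursorType(CursorType.TailableAwait)
            .iterator()) {
        while (cursor.hasNext()) {
            Document event = cursor.next();
            // dispatch the event; comparing against pid could skip self-published events
        }
    }
}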
@Override
public Observable<Success> createCollection(final String collectionName, final CreateCollectionOptions options) {
    return RxObservables.create(Observables.observe(new Block<SingleResultCallback<Success>>() {
        @Override
        public void apply(final SingleResultCallback<Success> callback) {
            wrapped.createCollection(collectionName, options, voidToSuccessCallback(callback));
        }
    }), observableAdapter);
}
@Override
public Publisher<Success> createCollection(final String collectionName, final CreateCollectionOptions options) {
    return new ObservableToPublisher<Success>(observe(new Block<SingleResultCallback<Success>>() {
        @Override
        public void apply(final SingleResultCallback<Success> callback) {
            wrapped.createCollection(collectionName, options, voidToSuccessCallback(callback));
        }
    }));
}
@Override
public Publisher<Success> createCollection(final ClientSession clientSession, final String collectionName,
                                           final CreateCollectionOptions options) {
    return new ObservableToPublisher<Success>(observe(new Block<SingleResultCallback<Success>>() {
        @Override
        public void apply(final SingleResultCallback<Success> callback) {
            wrapped.createCollection(clientSession, collectionName, options, voidToSuccessCallback(callback));
        }
    }));
}
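The reactive overloads above do nothing until the returned Publisher is subscribed to. A hedged sketch of driving one of them with a bare Reactive Streams subscriber; the latch-based waiting and the "capped_log" name are illustrative, and the blocking is purely for demonstration:

import java.util.concurrent.CountDownLatch;
import java.util.concurrent.TimeUnit;

import org.reactivestreams.Publisher;
import org.reactivestreams.Subscriber;
import org.reactivestreams.Subscription;

import com.mongodb.client.model.CreateCollectionOptions;
import com.mongodb.reactivestreams.client.MongoDatabase;
import com.mongodb.reactivestreams.client.Success;

public class CreateCollectionSketch {
    static void createCappedLog(MongoDatabase database) throws InterruptedException {
        Publisher<Success> publisher = database.createCollection("capped_log",
                new CreateCollectionOptions().capped(true).sizeInBytes(1000000).maxDocuments(1000));
        CountDownLatch latch = new CountDownLatch(1);
        publisher.subscribe(new Subscriber<Success>() {
            @Override public void onSubscribe(Subscription s) { s.request(1); } // demand the single result
            @Override public void onNext(Success success) { }
            @Override public void onError(Throwable t) { latch.countDown(); }
            @Override public void onComplete() { latch.countDown(); }
        });
        latch.await(10, TimeUnit.SECONDS); // wait for the server round trip
    }
}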
@Test
public void testBypassDocumentValidation() {
    checkMinServerVersion(3.2);
    getDs().save(asList(new User("john doe", new Date()), new User("John Doe", new Date())));

    MongoDatabase database = getMongoClient().getDatabase(TEST_DB_NAME);
    database.getCollection("out_users").drop();
    database.createCollection("out_users", new CreateCollectionOptions()
            .validationOptions(new ValidationOptions()
                    .validator(Document.parse("{ age : { gte : 13 } }"))));

    try {
        getDs()
            .createAggregation(User.class)
            .match(getDs().find(User.class).field("name").equal("john doe"))
            .out("out_users", User.class);
        fail("Document validation should have complained.");
    } catch (MongoCommandException e) {
        // expected
    }

    getDs()
        .createAggregation(User.class)
        .match(getDs().find(User.class).field("name").equal("john doe"))
        .out("out_users", User.class, builder()
            .bypassDocumentValidation(true)
            .build());

    Assert.assertEquals(1, getAds().find("out_users", User.class).count());
}
@Test
public void testBypassDocumentValidation() {
    checkMinServerVersion(3.4);
    getDs().save(asList(new Book("The Banquet", "Dante", 2),
                        new Book("Divine Comedy", "Dante", 1),
                        new Book("Eclogues", "Dante", 2),
                        new Book("The Odyssey", "Homer", 10),
                        new Book("Iliad", "Homer", 10)));

    Document validator = Document.parse("{ count : { $gt : '10' } }");
    ValidationOptions validationOptions = new ValidationOptions()
            .validator(validator)
            .validationLevel(ValidationLevel.STRICT)
            .validationAction(ValidationAction.ERROR);
    MongoDatabase database = getMongoClient().getDatabase(TEST_DB_NAME);
    database.getCollection("counts").drop();
    database.createCollection("counts", new CreateCollectionOptions().validationOptions(validationOptions));

    final String map = "function () { emit(this.author, 1); return; }";
    final String reduce = "function (key, values) { return values.length }";
    MapReduceOptions<CountResult> options = new MapReduceOptions<CountResult>()
            .query(getDs().find(Book.class))
            .resultType(CountResult.class)
            .outputType(OutputType.REPLACE)
            .map(map)
            .reduce(reduce);
    try {
        getDs().mapReduce(options);
        fail("Document validation should have complained.");
    } catch (MongoCommandException e) {
        // expected
    }

    getDs().mapReduce(options.bypassDocumentValidation(true));
    Assert.assertEquals(2, count(getDs().find(CountResult.class).iterator()));
}
private MongoDatabase addValidation(final Document validator, final String collectionName) {
    ValidationOptions options = new ValidationOptions()
            .validator(validator)
            .validationLevel(ValidationLevel.MODERATE)
            .validationAction(ValidationAction.ERROR);
    MongoDatabase database = getMongoClient().getDatabase(TEST_DB_NAME);
    database.getCollection(collectionName).drop();
    database.createCollection(collectionName, new CreateCollectionOptions().validationOptions(options));
    return database;
}
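A hypothetical call site for the helper above; the validator expression and collection name are made up for illustration:

MongoDatabase database = addValidation(
        Document.parse("{ number : { $gt : 10 } }"), "validated_docs");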
@Override
public void createCollection(String collectionName, CreateCollectionOptions createCollectionOptions) {
    database.createCollection(collectionName, createCollectionOptions);
}
@Test
public void testMultipleBursts() throws Exception {
    assertEquals(0, cappedTestCollection.count());
    MockEndpoint mock = getMockEndpoint("mock:test");
    mock.expectedMessageCount(5000);
    // create a capped collection with max = 1000
    CreateCollectionOptions createCollectionOptions = new CreateCollectionOptions()
            .capped(true)
            .sizeInBytes(1000000000)
            .maxDocuments(1000);
    db.createCollection(cappedTestCollectionName, createCollectionOptions);
    cappedTestCollection = db.getCollection(cappedTestCollectionName, BasicDBObject.class);
    addTestRoutes();
    context.startRoute("tailableCursorConsumer1");
    // pump 5 bursts of 1000 records each, with a 500ms pause between bursts
    Thread t = new Thread(new Runnable() {
        @Override
        public void run() {
            for (int i = 0; i < 5000; i++) {
                if (i % 1000 == 0) {
                    try {
                        Thread.sleep(500);
                    } catch (InterruptedException e) {
                        return;
                    }
                }
                cappedTestCollection.insertOne(new BasicDBObject("increasing", i).append("string", "value" + i));
            }
        }
    });
    // start the data pumping
    t.start();
    // before we assert, wait for the data pumping to end
    t.join();
    mock.assertIsSatisfied();
    context.stopRoute("tailableCursorConsumer1");
}
@Test
public void testHundredThousandRecords() throws Exception {
    assertEquals(0, cappedTestCollection.count());
    final MockEndpoint mock = getMockEndpoint("mock:test");
    mock.expectedMessageCount(1000);
    // create a capped collection with max = 1000
    db.createCollection(cappedTestCollectionName, new CreateCollectionOptions()
            .capped(true)
            .sizeInBytes(1000000000)
            .maxDocuments(1000));
    cappedTestCollection = db.getCollection(cappedTestCollectionName, BasicDBObject.class);
    addTestRoutes();
    context.startRoute("tailableCursorConsumer1");
    // continuously pump 100000 records, asserting incrementally to reduce overhead on the mock endpoint
    Thread t = new Thread(new Runnable() {
        @Override
        public void run() {
            for (int i = 1; i <= 100000; i++) {
                cappedTestCollection.insertOne(new BasicDBObject("increasing", i).append("string", "value" + i));
                // assert incrementally, as the mock endpoint stores all messages and the test would otherwise be sluggish
                if (i % 1000 == 0) {
                    try {
                        MongoDbTailableCursorConsumerTest.this.assertAndResetMockEndpoint(mock);
                    } catch (Exception e) {
                        return;
                    }
                }
            }
        }
    });
    // start the data pumping
    t.start();
    // before we stop the route, wait for the data pumping to end
    t.join();
    context.stopRoute("tailableCursorConsumer1");
}
@Test
public void testPersistentTailTrack() throws Exception {
    assertEquals(0, cappedTestCollection.count());
    final MockEndpoint mock = getMockEndpoint("mock:test");
    // drop the tracking collection
    db.getCollection(MongoDbTailTrackingConfig.DEFAULT_COLLECTION).drop();
    // create a capped collection with max = 1000
    db.createCollection(cappedTestCollectionName, new CreateCollectionOptions()
            .capped(true)
            .sizeInBytes(1000000000)
            .maxDocuments(1000));
    cappedTestCollection = db.getCollection(cappedTestCollectionName, BasicDBObject.class);
    cappedTestCollection.createIndex(new BasicDBObject("increasing", 1));
    addTestRoutes();
    context.startRoute("tailableCursorConsumer2");
    mock.expectedMessageCount(300);
    // pump 300 records
    Thread t = new Thread(new Runnable() {
        @Override
        public void run() {
            for (int i = 1; i <= 300; i++) {
                cappedTestCollection.insertOne(new BasicDBObject("increasing", i).append("string", "value" + i));
            }
        }
    });
    // start the data pumping and wait for it to finish before continuing
    t.start();
    t.join();
    mock.assertIsSatisfied();
    mock.reset();
    context.stopRoute("tailableCursorConsumer2");
    while (context.getRouteStatus("tailableCursorConsumer2") != ServiceStatus.Stopped) {
    }
    context.startRoute("tailableCursorConsumer2");
    // expect 300 messages, not 600
    mock.expectedMessageCount(300);
    // pump another 300 records
    t = new Thread(new Runnable() {
        @Override
        public void run() {
            for (int i = 301; i <= 600; i++) {
                cappedTestCollection.insertOne(new BasicDBObject("increasing", i).append("string", "value" + i));
            }
        }
    });
    // start the data pumping and wait for it to finish before continuing
    t.start();
    t.join();
    mock.assertIsSatisfied();
    // check that the first message received in this second batch corresponds to increasing=301
    Object firstBody = mock.getExchanges().get(0).getIn().getBody();
    assertTrue(firstBody instanceof DBObject);
    assertEquals(301, ((DBObject) firstBody).get("increasing"));
    // check that the lastVal is persisted at the right time: before and after stopping the route
    assertEquals(300, db.getCollection(MongoDbTailTrackingConfig.DEFAULT_COLLECTION)
            .find(new BasicDBObject("persistentId", "darwin")).first().get("lastTrackingValue"));
    // stop the route and verify that the last value has been updated
    context.stopRoute("tailableCursorConsumer2");
    while (context.getRouteStatus("tailableCursorConsumer2") != ServiceStatus.Stopped) {
    }
    assertEquals(600, db.getCollection(MongoDbTailTrackingConfig.DEFAULT_COLLECTION)
            .find(new BasicDBObject("persistentId", "darwin")).first().get("lastTrackingValue"));
}
@Test
public void testPersistentTailTrackIncreasingDateField() throws Exception {
    assertEquals(0, cappedTestCollection.count());
    final MockEndpoint mock = getMockEndpoint("mock:test");
    final Calendar startTimestamp = Calendar.getInstance();
    // drop and re-fetch the default tracking collection
    MongoCollection<BasicDBObject> trackingCol = db.getCollection(MongoDbTailTrackingConfig.DEFAULT_COLLECTION, BasicDBObject.class);
    trackingCol.drop();
    trackingCol = db.getCollection(MongoDbTailTrackingConfig.DEFAULT_COLLECTION, BasicDBObject.class);
    // create a capped collection with max = 1000
    db.createCollection(cappedTestCollectionName, new CreateCollectionOptions()
            .capped(true)
            .sizeInBytes(1000000000)
            .maxDocuments(1000));
    cappedTestCollection = db.getCollection(cappedTestCollectionName, BasicDBObject.class);
    addTestRoutes();
    context.startRoute("tailableCursorConsumer2");
    mock.expectedMessageCount(300);
    // pump 300 records, each timestamped one minute after the previous one
    Thread t = new Thread(new Runnable() {
        @Override
        public void run() {
            for (int i = 1; i <= 300; i++) {
                Calendar c = (Calendar) startTimestamp.clone();
                c.add(Calendar.MINUTE, i);
                cappedTestCollection.insertOne(new BasicDBObject("increasing", c.getTime()).append("string", "value" + i));
            }
        }
    });
    // start the data pumping and wait for it to finish before continuing
    t.start();
    t.join();
    mock.assertIsSatisfied();
    mock.reset();
    // ensure that the persisted lastVal is startTimestamp + 300min
    Calendar cal300 = (Calendar) startTimestamp.clone();
    cal300.add(Calendar.MINUTE, 300);
    context.stopRoute("tailableCursorConsumer2");
    assertEquals(cal300.getTime(), trackingCol.find(new BasicDBObject("persistentId", "darwin"))
            .first().get(MongoDbTailTrackingConfig.DEFAULT_FIELD));
    context.startRoute("tailableCursorConsumer2");
    // expect 300 messages, not 600
    mock.expectedMessageCount(300);
    // pump another 300 records
    t = new Thread(new Runnable() {
        @Override
        public void run() {
            for (int i = 301; i <= 600; i++) {
                Calendar c = (Calendar) startTimestamp.clone();
                c.add(Calendar.MINUTE, i);
                cappedTestCollection.insertOne(new BasicDBObject("increasing", c.getTime()).append("string", "value" + i));
            }
        }
    });
    // start the data pumping and wait for it to finish before continuing
    t.start();
    t.join();
    mock.assertIsSatisfied();
    // the first message of the second batch should carry the startTimestamp + 301min value
    Object firstBody = mock.getExchanges().get(0).getIn().getBody();
    assertTrue(firstBody instanceof DBObject);
    Calendar cal301 = (Calendar) startTimestamp.clone();
    cal301.add(Calendar.MINUTE, 301);
    assertEquals(cal301.getTime(), ((DBObject) firstBody).get("increasing"));
    // check that the persisted lastVal after stopping the route is startTimestamp + 600min
    context.stopRoute("tailableCursorConsumer2");
    Calendar cal600 = (Calendar) startTimestamp.clone();
    cal600.add(Calendar.MINUTE, 600);
    assertEquals(cal600.getTime(), trackingCol.find(new BasicDBObject("persistentId", "darwin"))
            .first().get(MongoDbTailTrackingConfig.DEFAULT_FIELD));
}
@Test
public void testCustomTailTrackLocation() throws Exception {
    assertEquals(0, cappedTestCollection.count());
    final MockEndpoint mock = getMockEndpoint("mock:test");
    // drop and re-fetch the custom tracking collection (tailTrackDb=einstein&tailTrackCollection=curie&tailTrackField=newton)
    MongoCollection<BasicDBObject> trackingCol = mongo.getDatabase("einstein").getCollection("curie", BasicDBObject.class);
    trackingCol.drop();
    trackingCol = mongo.getDatabase("einstein").getCollection("curie", BasicDBObject.class);
    // create a capped collection with max = 1000
    db.createCollection(cappedTestCollectionName, new CreateCollectionOptions()
            .capped(true)
            .sizeInBytes(1000000000)
            .maxDocuments(1000));
    cappedTestCollection = db.getCollection(cappedTestCollectionName, BasicDBObject.class);
    addTestRoutes();
    context.startRoute("tailableCursorConsumer3");
    mock.expectedMessageCount(300);
    // pump 300 records
    Thread t = new Thread(new Runnable() {
        @Override
        public void run() {
            for (int i = 1; i <= 300; i++) {
                cappedTestCollection.insertOne(new BasicDBObject("increasing", i).append("string", "value" + i));
            }
        }
    });
    // start the data pumping and wait for it to finish before continuing
    t.start();
    t.join();
    mock.assertIsSatisfied();
    mock.reset();
    // stop the route to ensure that our lastVal is persisted, then check it
    context.stopRoute("tailableCursorConsumer3");
    // the persisted lastVal should be 300; newton is the name of the tracking field we are using
    assertEquals(300, trackingCol.find(new BasicDBObject("persistentId", "darwin")).first().get("newton"));
    context.startRoute("tailableCursorConsumer3");
    // expect 300 messages, not 600
    mock.expectedMessageCount(300);
    // pump another 300 records
    t = new Thread(new Runnable() {
        @Override
        public void run() {
            for (int i = 301; i <= 600; i++) {
                cappedTestCollection.insertOne(new BasicDBObject("increasing", i).append("string", "value" + i));
            }
        }
    });
    // start the data pumping and wait for it to finish before continuing
    t.start();
    t.join();
    mock.assertIsSatisfied();
    // the first received body should contain increasing=301, not increasing=1, i.e. we did not restart from the top
    Object firstBody = mock.getExchanges().get(0).getIn().getBody();
    assertTrue(firstBody instanceof DBObject);
    assertEquals(301, ((DBObject) firstBody).get("increasing"));
    // after stopping the route, the persisted lastVal should be 600
    context.stopRoute("tailableCursorConsumer3");
    assertEquals(600, trackingCol.find(new BasicDBObject("persistentId", "darwin")).first().get("newton"));
}
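The Camel tests above all rely on addTestRoutes(), whose body is not shown. Below is a hedged sketch of what it might register, inferred from the route ids and tracking options referenced in the tests; the endpoint URIs and option values are assumptions based on the camel-mongodb component, not the original code:

// Hedged sketch: connection bean name, database name, and option values are illustrative.
private void addTestRoutes() throws Exception {
    context.addRoutes(new RouteBuilder() {
        @Override
        public void configure() {
            // plain tailable consumer, no persistent tracking
            from("mongodb:mongo?database=test&collection=" + cappedTestCollectionName
                    + "&consumerType=tailable&tailTrackIncreasingField=increasing")
                .routeId("tailableCursorConsumer1").autoStartup(false)
                .to("mock:test");

            // tailable consumer with persistent tail tracking under persistentId "darwin"
            from("mongodb:mongo?database=test&collection=" + cappedTestCollectionName
                    + "&consumerType=tailable&tailTrackIncreasingField=increasing"
                    + "&persistentTailTracking=true&persistentId=darwin")
                .routeId("tailableCursorConsumer2").autoStartup(false)
                .to("mock:test");
        }
    });
}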
@Override
public Observable<Success> createCollection(final String collectionName) {
    return createCollection(collectionName, new CreateCollectionOptions());
}
@Override
public void createCollection(String collectionName, CreateCollectionOptions createCollectionOptions) {
    // no-op in this implementation
}
@Override
public Publisher<Success> createCollection(final String collectionName) {
    return createCollection(collectionName, new CreateCollectionOptions());
}
@Override
public Publisher<Success> createCollection(final ClientSession clientSession, final String collectionName) {
    return createCollection(clientSession, collectionName, new CreateCollectionOptions());
}
/**
 * Create all collections in the database, then set up the necessary environment parameters.
 */
public void setupEmptyDatabase() {
    this.database.createCollection(MOVIES_COLLECTION, new CreateCollectionOptions().capped(false));
}
/**
 * Create a new collection with the selected options.
 *
 * @param collectionName the name for the new collection to create
 * @param options        various options for creating the collection
 * @return an observable identifying when the collection has been created
 * @mongodb.driver.manual reference/command/create Create Command
 */
Observable<Success> createCollection(String collectionName, CreateCollectionOptions options);
/**
 * Create a new collection with the selected options.
 *
 * @param collectionName the name for the new collection to create
 * @param options        various options for creating the collection
 * @return a publisher identifying when the collection has been created
 * @mongodb.driver.manual reference/command/create Create Command
 */
Publisher<Success> createCollection(String collectionName, CreateCollectionOptions options);
/**
 * Create a new collection with the selected options.
 *
 * @param clientSession  the client session with which to associate this operation
 * @param collectionName the name for the new collection to create
 * @param options        various options for creating the collection
 * @return a publisher identifying when the collection has been created
 * @mongodb.driver.manual reference/command/create Create Command
 * @mongodb.server.release 3.6
 * @since 1.7
 */
Publisher<Success> createCollection(ClientSession clientSession, String collectionName, CreateCollectionOptions options);