private void updateSingleRecord(List<WriteModel<Document>> bulkWriter, Document key) { // Key Query rotateCollection(); Document query = new Document(); Document change; if (key == null) { int range = sequence * testOpts.workingset / 100; int rest = sequence - range; int recordno = rest + getNextVal(range); query.append("_id", new Document("w", workerID).append("i", recordno)); } else { query.append("_id", key); } int updateFields = (testOpts.updateFields <= testOpts.numFields) ? testOpts.updateFields : testOpts.numFields; if (updateFields == 1) { long changedfield = (long) getNextVal((int) testOpts.NUMBER_SIZE); Document fields = new Document("fld0", changedfield); change = new Document("$set", fields); } else { TestRecord tr = createNewRecord(); tr.internalDoc.remove("_id"); change = new Document("$set", tr.internalDoc); } if (testOpts.findandmodify == false) { bulkWriter.add(new UpdateManyModel<Document>(query, change)); } else { this.coll.findOneAndUpdate(query, change); //These are immediate not batches } testResults.RecordOpsDone("updates", 1); }
/**
 * Translates framework-level {@link BulkOperation}s into MongoDB driver
 * {@link WriteModel}s suitable for {@code collection.bulkWrite(...)}.
 *
 * <p>Filters and update documents are passed through
 * {@code encodeKeyWhenUseObjectId} so string ids become ObjectIds when the
 * collection is configured that way.
 *
 * @param operations the operations to convert (order is preserved)
 * @return one WriteModel per input operation
 * @throws IllegalArgumentException if an operation has an unrecognized type
 */
private List<WriteModel<JsonObject>> convertBulkOperations(List<BulkOperation> operations) {
    List<WriteModel<JsonObject>> result = new ArrayList<>(operations.size());
    for (BulkOperation bulkOperation : operations) {
        switch (bulkOperation.getType()) {
            case DELETE:
                Bson bsonFilter = toBson(encodeKeyWhenUseObjectId(bulkOperation.getFilter()));
                if (bulkOperation.isMulti()) {
                    result.add(new DeleteManyModel<>(bsonFilter));
                } else {
                    result.add(new DeleteOneModel<>(bsonFilter));
                }
                break;
            case INSERT:
                result.add(new InsertOneModel<>(encodeKeyWhenUseObjectId(bulkOperation.getDocument())));
                break;
            case REPLACE:
                result.add(new ReplaceOneModel<>(
                        toBson(encodeKeyWhenUseObjectId(bulkOperation.getFilter())),
                        bulkOperation.getDocument(),
                        new com.mongodb.client.model.ReplaceOptions().upsert(bulkOperation.isUpsert())));
                break;
            case UPDATE:
                Bson filter = toBson(encodeKeyWhenUseObjectId(bulkOperation.getFilter()));
                Bson document = toBson(encodeKeyWhenUseObjectId(bulkOperation.getDocument()));
                com.mongodb.client.model.UpdateOptions updateOptions =
                        new com.mongodb.client.model.UpdateOptions().upsert(bulkOperation.isUpsert());
                if (bulkOperation.isMulti()) {
                    result.add(new UpdateManyModel<>(filter, document, updateOptions));
                } else {
                    result.add(new UpdateOneModel<>(filter, document, updateOptions));
                }
                break;
            default:
                // Report the unknown enum value, not the (constant) wrapper class.
                throw new IllegalArgumentException(
                        "Unknown bulk operation type: " + bulkOperation.getType());
        }
    }
    return result;
}
/**
 * Applies a single $set-style patch to every document matching the filter
 * (AND-ed with the shard keys, when present) via one bulk update-many write.
 *
 * @param dbName      target database name
 * @param collName    target collection name
 * @param filter      selection filter for the documents to patch
 * @param shardedKeys optional shard-key criteria, AND-ed with {@code filter}; may be null
 * @param data        the patch payload, converted by {@code DAOUtils.getUpdateDocument}
 * @return a BulkOperationResult wrapping HTTP 200 and the driver's BulkWriteResult
 */
@Override
public BulkOperationResult bulkPatchDocuments(
        String dbName,
        String collName,
        BsonDocument filter,
        BsonDocument shardedKeys,
        BsonDocument data) {
    MongoCollection<BsonDocument> collection = client
            .getDatabase(dbName)
            .getCollection(collName, BsonDocument.class);

    // Shard-key criteria, when supplied, must be part of the match.
    Bson effectiveFilter = (shardedKeys != null) ? and(filter, shardedKeys) : filter;

    List<WriteModel<BsonDocument>> patches = new ArrayList<>();
    patches.add(new UpdateManyModel<>(
            effectiveFilter,
            DAOUtils.getUpdateDocument(data),
            DAOUtils.U_NOT_UPSERT_OPS));

    BulkWriteResult outcome = collection.bulkWrite(patches);
    return new BulkOperationResult(HttpStatus.SC_OK, null, outcome);
}
protected Map<Integer, BulkWriteError> executeBulkUpdate(List<UpdateManyModel<Document>> documentsToUpdate) { // mapping of array indices for flow file errors Map<Integer, BulkWriteError> writeErrors = new HashMap<>(); try { collection.bulkWrite(documentsToUpdate); } catch (MongoBulkWriteException e) { List<BulkWriteError> errors = e.getWriteErrors(); for (BulkWriteError docError : errors) { writeErrors.put(docError.getIndex(), docError); } getLogger().warn("Error occurred during bulk write", e); } return writeErrors; }