/**
 * Update 6: {@link UserDocument} has changed; 'name' and 'surname' are merged into 'name'.
 * For every user document: read 'name' and 'surname', concatenate them (space separated),
 * write the result back to 'name' and remove the now-obsolete 'surname' field.
 *
 * @param template Mongo template giving access to the 'user' collection.
 * @since V7
 */
@ChangeSet(order = "008", id = "updateUserChangeNameAndSurnameToName", author = "admin")
public void updateUserChangeNameAndSurnametoName(final MongoTemplate template) {
    final DBCollection userCollection = template.getCollection("user");
    // Fix: find() returns a DBCursor that was never closed, leaking a
    // server-side cursor; close it in a finally block.
    final DBCursor cursor = userCollection.find();
    try {
        while (cursor.hasNext()) {
            final DBObject current = cursor.next();
            final Object nameObj = current.get("name");
            final Object surnameObj = current.get("surname");
            // Null-safe concatenation; missing parts become the empty string.
            final String updateName =
                    (nameObj != null ? nameObj.toString() : "")
                    + " "
                    + (surnameObj != null ? surnameObj.toString() : "");
            final BasicDBObject updateQuery = new BasicDBObject();
            updateQuery.append("$set", new BasicDBObject("name", updateName));
            updateQuery.append("$unset", new BasicDBObject("surname", ""));
            // Match on _id so each document is updated exactly once.
            final BasicDBObject searchQuery = new BasicDBObject();
            searchQuery.put("_id", current.get("_id"));
            userCollection.update(searchQuery, updateQuery);
        }
    } finally {
        cursor.close();
    }
}
/**
 * Returns the next value of the "package" sequence kept in the 'counters'
 * collection of the destination database.
 *
 * Fix: the original read-modify-write (findOne, increment in memory, update)
 * was not atomic — two concurrent callers could read the same 'seq' and hand
 * out duplicate ids. findAndModify with $inc performs the increment
 * atomically on the server, and upsert=true creates the counter document on
 * first use (matching the original's lazy initialization at seq 0).
 *
 * @param destDatabase GridFS wrapper whose underlying DB holds the counters.
 * @return the freshly incremented sequence value.
 */
public int getNextId(GridFS destDatabase) {
    DBCollection countersCollection = destDatabase.getDB().getCollection("counters");
    DBObject updated = countersCollection.findAndModify(
            new BasicDBObject("_id", "package"),                     // query
            null,                                                    // fields
            null,                                                    // sort
            false,                                                   // remove
            new BasicDBObject("$inc", new BasicDBObject("seq", 1)),  // update
            true,                                                    // returnNew
            true);                                                   // upsert
    return ((Number) updated.get("seq")).intValue();
}
public DBObject selectOne(DBCollection collection) { DBObject fields = getFields(); DBObject query = getQuery(); DBObject orderByObject = getOrderByObject(); // 日志 log(fields, query, orderByObject); if (null == fields && null == orderByObject) { return collection.findOne(query); } else if (null != fields && null == orderByObject) { return collection.findOne(query, fields); } else { return collection.findOne(query, fields, orderByObject); } }
/**
 * Init registry.
 *
 * Prepares the MongoDb collection backing the ticket registry:
 * optionally drops it, creates it when missing, and adds a TTL index
 * so ticket documents expire automatically.
 **/
@PostConstruct
public void initialize() {
    Assert.notNull(this.mongoTemplate);
    LOGGER.debug("Setting up MongoDb Ticket Registry instance [{}]", this.collectionName);
    // Optional clean start: wipe the collection before recreating it.
    if (this.dropCollection) {
        LOGGER.debug("Dropping database collection: [{}]", this.collectionName);
        this.mongoTemplate.dropCollection(this.collectionName);
    }
    if (!this.mongoTemplate.collectionExists(this.collectionName)) {
        LOGGER.debug("Creating database collection: [{}]", this.collectionName);
        this.mongoTemplate.createCollection(this.collectionName);
    }
    LOGGER.debug("Creating indices on collection [{}] to auto-expire documents...", this.collectionName);
    final DBCollection collection = mongoTemplate.getCollection(this.collectionName);
    // TTL index: expireAfterSeconds=0 means each document expires exactly at
    // the timestamp stored in its FIELD_NAME_EXPIRE_AT field.
    collection.createIndex(new BasicDBObject(TicketHolder.FIELD_NAME_EXPIRE_AT, 1),
            new BasicDBObject("expireAfterSeconds", 0));
    LOGGER.info("Configured MongoDb Ticket Registry instance [{}]", this.collectionName);
}
/**
 * Executes a batch of update statements as a single raw "update" command so
 * per-statement upsert/multi flags travel in one round trip.
 *
 * @param dbCollection any collection handle on the target database
 * @param collName collection the updates apply to
 * @param options one entry per update statement
 * @param ordered true to stop the batch at the first failing statement
 * @return the server-reported count of affected documents ("n")
 */
private int doBatchUpdate(DBCollection dbCollection, String collName, Collection<BatchUpdateOptions> options, boolean ordered) {
    List<BasicDBObject> updateList = new ArrayList<BasicDBObject>();
    for (BatchUpdateOptions option : options) {
        BasicDBObject statement = new BasicDBObject();
        statement.put("q", option.getQuery().getQueryObject());
        statement.put("u", option.getUpdate().getUpdateObject());
        statement.put("upsert", option.isUpsert());
        statement.put("multi", option.isMulti());
        updateList.add(statement);
    }
    // The command name ("update") must be the first key of the document.
    DBObject command = new BasicDBObject();
    command.put("update", collName);
    command.put("updates", updateList);
    command.put("ordered", ordered);
    CommandResult commandResult = dbCollection.getDB().command(command);
    return Integer.parseInt(commandResult.get("n").toString());
}
/**
 * Translates a SQL INSERT statement into a single Mongo document insert.
 *
 * @param state parsed INSERT statement (columns and VALUES must line up)
 * @return always 1 (one row inserted)
 */
private int InsertData(SQLInsertStatement state) {
    final int valueCount = state.getValues().getValues().size();
    if (valueCount == 0) {
        throw new RuntimeException("number of columns error");
    }
    if (valueCount != state.getColumns().size()) {
        throw new RuntimeException("number of values and columns have to match");
    }
    // Pair each column expression with its value expression, in order.
    BasicDBObject document = new BasicDBObject();
    int i = 0;
    for (SQLExpr col : state.getColumns()) {
        document.put(getFieldName2(col), getExpValue(state.getValues().getValues().get(i)));
        i++;
    }
    SQLTableSource table = state.getTableSource();
    DBCollection coll = this._db.getCollection(table.toString());
    coll.insert(new DBObject[] { document });
    return 1;
}
private int UpData(SQLUpdateStatement state) { SQLTableSource table=state.getTableSource(); DBCollection coll =this._db.getCollection(table.toString()); SQLExpr expr=state.getWhere(); DBObject query = parserWhere(expr); BasicDBObject set = new BasicDBObject(); for(SQLUpdateSetItem col : state.getItems()){ set.put(getFieldName2(col.getColumn()), getExpValue(col.getValue())); } DBObject mod = new BasicDBObject("$set", set); coll.updateMulti(query, mod); //System.out.println("changs count:"+coll.getStats().size()); return 1; }
/** Read the occasions that are stored in the database and schedule them to run. */ @PostConstruct public void afterCreate() { String method = "afterCreate"; logger.entering(clazz, method); orchestrator.setOccasionResource(this); DBCollection occasions = getCollection(); DBCursor cursor = occasions.find(); while (cursor.hasNext()) { DBObject dbOccasion = cursor.next(); Occasion occasion = new Occasion(dbOccasion); try { // TODO: There was a comment here about how we should mark the event as // popped in the database, which will have different meaning for // one-time or interval occasions. Need to re-visit this. orchestrator.scheduleOccasion(occasion); } catch (Throwable t) { logger.log(Level.WARNING, "Could not schedule occasion at startup", t); } } logger.exiting(clazz, method); }
/**
 * Round-trip test: insert one document, read it back with findOne, verify
 * every field (including the nested "info" sub-document), then remove it.
 */
public void testFindOne() throws UnknownHostException {
    MarcMongodbClient client = new MarcMongodbClient("localhost", 27017, "sub_last_print");
    DBCollection collection = client.getCollection("marc");
    collection.insert(createTestObject());
    assertEquals(1, collection.count());
    DBObject found = collection.findOne();
    assertEquals("MongoDB", found.get("name"));
    assertEquals("database", found.get("type"));
    assertEquals(1, found.get("count"));
    Object info = found.get("info");
    assertEquals(BasicDBObject.class, info.getClass());
    assertEquals(new BasicDBObject("x", 203).append("y", 102), info);
    assertEquals(203, ((BasicDBObject) info).get("x"));
    assertEquals(Integer.class, ((BasicDBObject) info).get("x").getClass());
    System.out.println(found);
    // Clean up so later tests see an empty collection.
    collection.remove(new BasicDBObject("name", "MongoDB"));
}
/**
 * Imports the fixture MARC-JSON records into the 'marc' collection, checks
 * the expected count (674), then removes them again.
 *
 * Fix: the 'jsonObject' local was assigned but never used; the parse call is
 * kept for its validation side effect, the dead variable is gone.
 */
public synchronized void testImport() throws URISyntaxException, IOException, InterruptedException {
    MarcMongodbClient client = new MarcMongodbClient("localhost", 27017, "sub_last_print");
    DBCollection collection = client.getCollection("marc");
    assertEquals(0, collection.count());
    boolean insert = true;
    if (insert) {
        List<String> records = FileUtils.readLines("general/marc.json");
        for (String record : records) {
            JsonPathCache<? extends XmlFieldInstance> cache = new JsonPathCache<>(record);
            // Parse kept so malformed records still fail here, as before.
            jsonProvider.parse(record);
            String id = cache.get("$.controlfield.[?(@.tag == '001')].content").get(0).getValue();
            String x003 = cache.get("$.controlfield.[?(@.tag == '003')].content").get(0).getValue();
            BasicDBObject doc = new BasicDBObject("type", "marcjson")
                .append("id", id)
                .append("x003", x003)
                .append("record", record);
            collection.insert(doc);
        }
        assertEquals(674, collection.count());
    }
    collection.remove(new BasicDBObject("type", "marcjson"));
    assertEquals(0, collection.count());
}
public Object selectVar(String dsKey, String sql) { SelectVo selectVo = (SelectVo) sqlParser.parse(sql); DBCollection collection = MongoSupport.getCollection(dsKey, selectVo.getTable()); // return selectVo.selectVar(collection); fix bug Object result = selectVo.selectVar(collection); if (null == result) { return result; } if (result instanceof DBObject) { // return getXCOResults((DBObject) result, null); XCO one = getXCOResults((DBObject) result, null); return selectVo.selectVarOneField(one); } else { return result; } }
public Object insert(DBCollection collection, WriteConcern writeConcern) { DBObject document = new BasicDBObject(); // 匹配_id for (int i = 0, n = columns.size(); i < n; i++) { // document.put(columns.get(i), values.get(i).getValue()); String tempColumn = columns.get(i); if (3 == tempColumn.length() && tempColumn.equals("_id")) { document.put(tempColumn, new ObjectId(values.get(i).getValue().toString())); } else { document.put(tempColumn, values.get(i).getValue()); } } log(document); // TODO: WriteConcern.ACKNOWLEDGED需要可以配置 // WriteResult result = collection.insert(document, WriteConcern.ACKNOWLEDGED); // collection.insert(document, MongoComponent.getInstance().getDefaultWriteConcern()); collection.insert(document, writeConcern); Object oid = document.get("_id"); if (null != oid) { return oid.toString(); } return null; }
@Test public void updatePojoTest() { Bson update = combine(set("user", "Jim"), set("action", Action.DELETE), // unfortunately at this point we need to provide a non generic class, so the codec is able to determine all types // remember: type erasure makes it impossible to retrieve type argument values at runtime // @todo provide a mechanism to generate non-generic class on the fly. Is that even possible ? // set("listOfPolymorphicTypes", buildNonGenericClassOnTheFly(Arrays.asList(new A(123), new B(456f)), List.class, Type.class), set("listOfPolymorphicTypes", new PolymorphicTypeList(Arrays.asList(new A(123), new B(456f)))), currentDate("creationDate"), currentTimestamp("_id")); FindOneAndUpdateOptions findOptions = new FindOneAndUpdateOptions(); findOptions.upsert(true); findOptions.returnDocument(ReturnDocument.AFTER); MongoCollection<Pojo> pojoMongoCollection = mongoClient.getDatabase("test").getCollection("documents").withDocumentClass(Pojo.class); Pojo pojo = pojoMongoCollection.findOneAndUpdate(Filters.and(Filters.lt(DBCollection.ID_FIELD_NAME, 0), Filters.gt(DBCollection.ID_FIELD_NAME, 0)), update, findOptions); assertNotNull(pojo.id); }
public DB createDatabase(String databaseName) throws MongoServiceException { try { DB db = client.getDB(databaseName); // save into a collection to force DB creation. DBCollection col = db.createCollection("foo", null); BasicDBObject obj = new BasicDBObject(); obj.put("foo", "bar"); col.insert(obj); // drop the collection so the db is empty // col.drop(); return db; } catch (MongoException e) { // try to clean up and fail try { deleteDatabase(databaseName); } catch (MongoServiceException ignore) {} throw handleException(e); } }
private void reportGauge(final String name, final Gauge gauge, final Date timestamp) { final DBCollection coll = db.getCollection("metric_gauge"); final Object value = gauge.getValue(); if (value == null) { // skip report return; } if (!String.class.equals(value.getClass())) { final GaugeEntity entity = new GaugeEntity(); entity.setName(prefix(name)); entity.setTimestamp(timestamp); entity.setValue(value); try { coll.save(entity.toDBObject()); } catch (MongoException e) { LOGGER.warn("Unable to report gauge {}", name, e); } } }
public CacheEntry getCacheEntry(String key, CacheEntry defaultValue) { DBCursor cur = null; DBCollection coll = getCollection(); BasicDBObject query = new BasicDBObject("key", key.toLowerCase()); // be sure to flush flushInvalid(coll,query); cur = coll.find(query); if (cur.count() > 0) { hits++; MongoDBCacheDocument doc = new MongoDBCacheDocument((BasicDBObject) cur.next()); doc.addHit(); //update the statistic and persist save(doc,0); return new MongoDBCacheEntry(doc); } misses++; return defaultValue; }
/**
 * Perform a single 'update' operation on the local object store.
 *
 * @param ref Object reference string of the object to be written.
 * @param version Expected version number of object before updating.
 * @param obj JSON string encoding the object to be written.
 * @param collection Collection to put to.
 *
 * @return an UpdateResultDesc object describing the success or failure of
 *    the operation.
 */
private UpdateResultDesc doUpdate(String ref, int version, String obj, DBCollection collection) {
    if (obj == null) {
        return new UpdateResultDesc(ref, "no object data given", false);
    }
    String failure = null;
    boolean atomicFailure = false;
    try {
        DBObject objectToWrite = jsonLiteralToDBObject(obj, ref);
        // Optimistic concurrency: only match when the stored version still
        // equals the caller's expected version (no upsert, single document).
        DBObject query = new BasicDBObject();
        query.put("ref", ref);
        query.put("version", version);
        WriteResult result = collection.update(query, objectToWrite, false, false);
        if (result.getN() != 1) {
            failure = "stale version number on update";
            atomicFailure = true;
        }
    } catch (Exception e) {
        failure = e.getMessage();
    }
    return new UpdateResultDesc(ref, failure, atomicFailure);
}
/**
 * Perform a single 'query' operation on the local object store.
 *
 * @param template Query template indicating what objects are sought.
 * @param collection Collection to query.
 * @param maxResults Maximum number of result objects to return, or 0 to
 *    indicate no fixed limit.
 *
 * @return a list of ObjectDesc objects for objects matching the query.
 */
private List<ObjectDesc> doQuery(JSONObject template, DBCollection collection, int maxResults) {
    List<ObjectDesc> results = new LinkedList<ObjectDesc>();
    try {
        DBObject query = jsonObjectToDBObject(template);
        DBCursor cursor;
        if (maxResults > 0) {
            // Legacy-driver idiom: a NEGATIVE batch size asks the server for
            // at most that many results and then closes the cursor, acting as
            // a result limit. NOTE(review): presumably intentional — confirm
            // against the driver version before changing this call.
            cursor = collection.find(query, null, 0, -maxResults);
        } else {
            cursor = collection.find(query);
        }
        for (DBObject dbObj : cursor) {
            JSONObject jsonObj = dbObjectToJSONObject(dbObj);
            String obj = jsonObj.sendableString();
            results.add(new ObjectDesc("query", obj, null));
        }
    } catch (Exception e) {
        // Failures are reported as a single error ObjectDesc, not thrown.
        results.add(new ObjectDesc("query", null, e.getMessage()));
    }
    return results;
}
public void addRate(Rating rating) { DBCollection collection = MongoFactory.getDB().getCollection(collectionName); DBObject query = new BasicDBObject(FIELD_ID, new ObjectId(getId())); ratings.add(rating); // Update module's average double average = 0; for (Rating r : ratings) { average += r.getRate(); } average = average / ratings.size(); DBObject toSet = new BasicDBObject(); toSet.put(FIELD_RATINGS, ratings.toDBObject()); toSet.put(FIELD_AVERAGE_RATE, ((double) average)); collection.update(query, new BasicDBObject("$set", toSet), true, false); }
public void save() throws SinfonierException { DBCollection collection = MongoFactory.getDB().getCollection(collectionName); if (this.id == null) { Logger.info("Adding a new module version"); DBObject dbObj = this.toDBObject(); collection.save(dbObj); this.id = dbObj.get(FIELD_ID).toString(); } else { Logger.info("Editing module version id:" + this.getId().toString()); DBObject query = new BasicDBObject(FIELD_ID, new ObjectId(this.id)); DBObject toSet = this.toDBObject(); // Remove fields's unmodified toSet.removeField(FIELD_ID); toSet.removeField(FIELD_CREATED); toSet.removeField(FIELD_TOPOLOGIES_COUNT); toSet.removeField(FIELD_BUILD_STATUS); // Update field's update toSet.put(FIELD_UPDATED, new Date()); collection.update(query, new BasicDBObject("$set", toSet), true, false); } }
// Constructor: binds this queue to the single executing-job collection and
// lazily creates its secondary indexes.
public MongoExecutingJobQueue(Config config) {
    super(config);
    // table name (Collection name) for single table
    setTableName(JobQueueUtils.EXECUTING_JOB_QUEUE);
    // create table
    DBCollection dbCollection = template.getCollection();
    List<DBObject> indexInfo = dbCollection.getIndexInfo();
    // A fresh collection carries only the default _id index, so <= 1 entries
    // means our secondary indexes have not been created yet.
    if (CollectionUtils.sizeOf(indexInfo) <= 1) {
        template.ensureIndex("idx_jobId", "jobId", true, true);
        template.ensureIndex("idx_taskId_taskTrackerNodeGroup", "taskId, taskTrackerNodeGroup", true, true);
        template.ensureIndex("idx_taskTrackerIdentity", "taskTrackerIdentity");
        template.ensureIndex("idx_gmtCreated", "gmtCreated");
    }
}
/**
 * Reads every document from some_db.some_collection, appends each document's
 * string form to one StringBuilder, and prints the elapsed read time.
 *
 * @return builder containing the concatenated documents
 */
public StringBuilder getMongoDBData() {
    DBCollection collection = MongoUtil.getCollection("some_db", "some_collection");
    DBCursor cursor = collection.find();
    StringBuilder data = new StringBuilder();
    long startTime = System.currentTimeMillis();
    try {
        while (cursor.hasNext()) {
            data.append(cursor.next());
        }
    } finally {
        // Fix: the cursor was never closed, leaking server-side resources.
        cursor.close();
    }
    long endTime = System.currentTimeMillis();
    System.out.println("Time taken : " + (endTime - startTime));
    return data;
}
// Migration 03: unique compound index over the social-connection key triple.
@ChangeSet(order = "03", author = "initiator", id = "03-addSocialUserConnection")
public void addSocialUserConnection(DB db) {
    DBCollection socialUserConnectionCollection = db.getCollection("jhi_social_user_connection");
    // Same key document the builder produced, in the same key order.
    DBObject keys = new BasicDBObject("user_id", 1)
            .append("provider_id", 1)
            .append("provider_user_id", 1);
    // unique=true: one connection per (user, provider, provider user).
    socialUserConnectionCollection.createIndex(keys, "user-prov-provusr-idx", true);
}
public static void main(String[] args) { // 新建一张表 DBCollection db = addCollection(defauleDbName, "test", null); System.out.println(db != null); // 新增一条记录 DBObject dbObject = new BasicDBObject(); dbObject.put("name", "jack"); save(defauleDbName, "test", dbObject); // 查询该记录 DBObject query = new BasicDBObject(); query.put("name", "jack"); DBObject fields = new BasicDBObject(); DBObject result = findOne(defauleDbName, "test", query, fields); System.out.println(result!=null?result.get("name"):"查询失败"); }
/**
 * Perform a single 'put' operation on the local object store.
 *
 * @param ref Object reference string of the object to be written.
 * @param obj JSON string encoding the object to be written.
 * @param collection Collection to put to.
 * @param requireNew true to insist the object be newly inserted rather than
 *    upserted over an existing one.
 *
 * @return a ResultDesc object describing the success or failure of the
 *    operation.
 */
private ResultDesc doPut(String ref, String obj, DBCollection collection, boolean requireNew) {
    String failure = null;
    if (obj == null) {
        failure = "no object data given";
    } else {
        try {
            DBObject objectToWrite = jsonLiteralToDBObject(obj, ref);
            if (requireNew) {
                // Fix: dropped the unused 'WriteResult wr' local — the
                // insert's result was never inspected.
                collection.insert(objectToWrite);
            } else {
                // Upsert keyed on 'ref' (upsert=true, multi=false).
                DBObject query = new BasicDBObject();
                query.put("ref", ref);
                collection.update(query, objectToWrite, true, false);
            }
        } catch (Exception e) {
            failure = e.getMessage();
        }
    }
    return new ResultDesc(ref, failure);
}
@Override public boolean createQueue(String taskTrackerNodeGroup) { String tableName = JobQueueUtils.getExecutableQueueName(taskTrackerNodeGroup); DBCollection dbCollection = template.getCollection(tableName); List<DBObject> indexInfo = dbCollection.getIndexInfo(); // create index if not exist if (CollectionUtils.sizeOf(indexInfo) <= 1) { template.ensureIndex(tableName, "idx_jobId", "jobId", true, true); template.ensureIndex(tableName, "idx_taskId_taskTrackerNodeGroup", "taskId, taskTrackerNodeGroup", true, true); template.ensureIndex(tableName, "idx_taskTrackerIdentity", "taskTrackerIdentity"); template.ensureIndex(tableName, "idx_triggerTime_priority_gmtCreated", "triggerTime,priority,gmtCreated"); template.ensureIndex(tableName, "idx_isRunning", "isRunning"); LOGGER.info("create queue " + tableName); } EXIST_TABLE.add(tableName); return true; }
/**
 * Resolves the Mongo collection backing the given model class, optionally the
 * companion "_history" collection.
 *
 * Fix: the original appended "_history" BEFORE the null check, so a null
 * collection name became the string "null_history", the fallback resolution
 * via __name() was silently skipped, and a phantom collection was addressed.
 * The base name is now fully resolved first; the suffix is applied last.
 *
 * @param modelClass model type whose collection is wanted
 * @param history true to address the "_history" variant
 * @return the resolved DBCollection
 * @throws IllegalStateException when no collection name can be determined
 */
protected <T extends Model> DBCollection collection(Class<T> modelClass, boolean history) {
    String colName = getCollectionName(modelClass);
    if (colName == null) {
        // Fall back to the model's __name() method.
        Model m = app.model(modelClass);
        MethodHandle mh = Reflect.getMethodHandle(modelClass, "__name", true);
        try {
            colName = (String) mh.invokeExact(m);
        } catch (Throwable e) {
            e.printStackTrace();
        }
    }
    if (Str.isEmpty(colName))
        throw new IllegalStateException("The model class '" + modelClass.getName() + "' must specify the collection name in the @Model annotation or in the __name() method.");
    if (history)
        colName = colName + "_history";
    return db(modelClass).getCollection(colName);
}
/**
 * Counts history (snapshot) documents of the given model matching the filter
 * and records the count in the request's query metadata.
 *
 * @param modelClass model type (required)
 * @param filter field filter; null means no filtering
 * @return the matching snapshot count
 * @throws DaoException when modelClass is null
 */
@Override
public <T extends Model> Long snapshotCount(Class<T> modelClass, Map<String, Object> filter) {
    if (modelClass == null)
        throw new DaoException("Model class cannot be null");
    if (filter == null)
        filter = EMPTY_FILTER;
    DBObject dbObject = buildQueryFilter(modelClass, filter, null);
    DBCollection col = collection(modelClass, true);
    // Fix: DBCollection.count returns a primitive long, so the boxed value
    // can never be null — the original 'if (count != null)' guard was dead.
    Long count = col.count(dbObject);
    app.setQueryMetadata(QueryMetadata.builder().count(count).build());
    return count;
}
/**
 * Runs the configured update against the collection.
 *
 * @return number of documents affected, per WriteResult.getN()
 * @throws DBException wrapping any driver failure
 */
@Override
protected Integer execute(DBCollection collection, MongoHandle mongoHandle) throws DBException {
    try {
        return collection.update(query, update, upsert, multi).getN();
    } catch (Exception e) {
        throw new DBException(e);
    }
}
/**
 * Fetches matching documents as a JSONArray by delegating to the handle with
 * this command's condition, settings, and projection keys.
 */
@Override
protected JSONArray execute(DBCollection collection, MongoHandle mongoHandle, QuerySettings querySettings) throws DBException {
    return mongoHandle.getJSONs(getCondition(), querySettings, keys);
}
/**
 * Opens a streaming enumeration over matching documents, delegating to the
 * handle with this command's condition, settings, and projection keys.
 */
@Override
protected DBEnumeration<JSONObject> getDBEnumerations(DBCollection collection, MongoHandle mongoHandle, QuerySettings querySettings) throws DBException {
    return mongoHandle.getDBEnumerations(getCondition(), querySettings, keys);
}
/**
 * Inserts the prepared documents into the collection.
 *
 * @return Boolean.TRUE on success
 * @throws DBException wrapping any driver failure
 */
@Override
protected Object execute(DBCollection collection, MongoHandle mongoHandle) throws DBException {
    try {
        collection.insert(dbObjects);
    } catch (Exception e) {
        throw new DBException(e);
    }
    return true;
}
/**
 * Translates a SQL DELETE statement into a Mongo remove. A WHERE clause is
 * mandatory — an unfiltered delete would wipe the whole collection.
 *
 * @param state parsed DELETE statement
 * @return always 1
 */
private int DeleteDate(SQLDeleteStatement state) {
    DBCollection coll = this._db.getCollection(state.getTableSource().toString());
    SQLExpr where = state.getWhere();
    if (where == null) {
        throw new RuntimeException("not where of sql");
    }
    coll.remove(parserWhere(where));
    return 1;
}
/**
 * Drops every collection named in the DROP TABLE statement.
 *
 * @param state parsed DROP TABLE statement
 * @return always 1
 */
private int dropTable(SQLDropTableStatement state) {
    for (SQLTableSource table : state.getTableSources()) {
        this._db.getCollection(table.toString()).drop();
    }
    return 1;
}
// Migration 01: seed the built-in roles; _id doubles as the authority name.
@ChangeSet(order = "01", author = "initiator", id = "01-addAuthorities")
public void addAuthorities(DB db) {
    DBCollection authorityCollection = db.getCollection("jhi_authority");
    for (String authority : new String[] {"ROLE_ADMIN", "ROLE_USER"}) {
        authorityCollection.insert(new BasicDBObject("_id", authority));
    }
}
// Migration 04: add the manager and support roles.
@ChangeSet(order = "04", author = "user", id = "04-addAuthorities-2")
public void addAuthorities2(DB db) {
    DBCollection authorityCollection = db.getCollection("jhi_authority");
    for (String authority : new String[] {"ROLE_MANAGER", "ROLE_SUPPORT"}) {
        authorityCollection.insert(new BasicDBObject("_id", authority));
    }
}
// Migration 05: add the actuator role.
@ChangeSet(order = "05", author = "user", id = "05-addAuthorities-3")
public void addAuthorities3(DB db) {
    DBCollection authorityCollection = db.getCollection("jhi_authority");
    authorityCollection.insert(new BasicDBObject("_id", "ROLE_ACTUATOR"));
}