@Override
public List<ApplicationDTO> getAppInfoByAppNames(List<String> names) {
    Aggregation aggregation = newAggregation(
            match(Criteria.where("appname").in(names).and("timestamp").exists(true)),
            sort(new Sort(DESC, "timestamp")),
            project("appname", "platform", "starrating", "timestamp", "comment", "authorName", "url"),
            group("appname", "platform")
                    .push(new BasicDBObject("author", "$authorName")
                            .append("rate", "$starrating")
                            .append("timestamp", "$timestamp")
                            .append("comment", "$comment")
                            .append("url", "$url")
                    ).as("reviews"),
            project("appname", "platform")
                    .and("reviews").slice(8, 0)
    );

    // Convert the aggregation result into a list of ApplicationDTOs
    AggregationResults<ApplicationDTO> groupResults =
            mongoTemplate.aggregate(aggregation, Review.class, ApplicationDTO.class);
    return groupResults.getMappedResults();
}
private void buildFilters(BasicDBObject pushdownFilters,
        Map<String, List<BasicDBObject>> mergedFilters) {
    for (Entry<String, List<BasicDBObject>> entry : mergedFilters.entrySet()) {
        List<BasicDBObject> list = entry.getValue();
        if (list.size() == 1) {
            this.filters.putAll(list.get(0).toMap());
        } else {
            BasicDBObject andQueryFilter = new BasicDBObject();
            andQueryFilter.put("$and", list);
            this.filters.putAll(andQueryFilter.toMap());
        }
    }
    if (pushdownFilters != null && !pushdownFilters.toMap().isEmpty()) {
        if (!mergedFilters.isEmpty()) {
            this.filters = MongoUtils.andFilterAtIndex(this.filters, pushdownFilters);
        } else {
            this.filters = pushdownFilters;
        }
    }
}
/**
 * Add a new group object to the database. Call DELETE using the id of the new mongo object.
 * Verify that the group no longer exists in the database.
 *
 * @throws GeneralSecurityException
 */
@Test
public void testDeleteGroup() throws IOException, GeneralSecurityException {
    System.out.println("\nStarting testDeleteGroup");

    // Create group in database
    Group group = new Group(null, "testGroup", new String[] {"12345"});
    BasicDBObject dbGroup = group.getDBObject(false);
    db.getCollection(Group.DB_COLLECTION_NAME).insert(dbGroup);
    group.setId(dbGroup.getObjectId(Group.DB_ID).toString());
    ObjectId groupId = dbGroup.getObjectId(Group.DB_ID);

    // Make DELETE call with group id
    String url = groupServiceURL + "/" + groupId;
    makeConnection("DELETE", url, null, 200);

    // Verify that the group no longer exists in mongo
    BasicDBObject groupAfterDelete =
            (BasicDBObject) db.getCollection("groups").findOne(groupId);
    assertNull("The group still exists after DELETE was called", groupAfterDelete);
}
public int getNextId(GridFS destDatabase) {
    DBCollection countersCollection = destDatabase.getDB().getCollection("counters");
    DBObject record = countersCollection.findOne(new BasicDBObject("_id", "package"));
    if (record == null) {
        BasicDBObject dbObject = new BasicDBObject("_id", "package");
        dbObject.append("seq", 0);
        countersCollection.insert(dbObject);
        record = dbObject;
    }
    int oldID = (int) record.get("seq");
    int newID = oldID + 1;
    record.put("seq", newID);
    countersCollection.update(new BasicDBObject("_id", "package"), record);
    return newID;
}
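// The read-modify-write sequence above is not atomic: two concurrent callers can read the
// same "seq" value and hand out duplicate ids. A minimal sketch of an atomic alternative,
// assuming the same { _id: "package", seq: <int> } counter document; the method name is
// hypothetical and only illustrates the legacy driver's findAndModify with $inc.
public int getNextIdAtomic(GridFS destDatabase) {
    DBCollection counters = destDatabase.getDB().getCollection("counters");
    DBObject updated = counters.findAndModify(
            new BasicDBObject("_id", "package"),                      // query
            null,                                                     // fields
            null,                                                     // sort
            false,                                                    // remove
            new BasicDBObject("$inc", new BasicDBObject("seq", 1)),   // update
            true,                                                     // return the new document
            true);                                                    // upsert if missing
    return ((Number) updated.get("seq")).intValue();
}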
@Test
public void checkEncryptedSubdocument() {
    MyBean bean = new MyBean();
    MySubBean subBean = new MySubBean("sky is blue", " earth is round");
    bean.secretSubBean = subBean;
    mongoTemplate.save(bean);

    MyBean fromDb = mongoTemplate.findOne(query(where("_id").is(bean.id)), MyBean.class);
    assertThat(fromDb.secretSubBean.nonSensitiveData, is(bean.secretSubBean.nonSensitiveData));
    assertThat(fromDb.secretSubBean.secretString, is(bean.secretSubBean.secretString));

    DBObject fromMongo = mongoTemplate.getCollection(MyBean.MONGO_MYBEAN)
            .find(new BasicDBObject("_id", new ObjectId(bean.id))).next();
    int expectedLength = 12
            + MySubBean.MONGO_NONSENSITIVEDATA.length() + subBean.secretString.length() + 7
            + MySubBean.MONGO_SECRETSTRING.length() + subBean.nonSensitiveData.length() + 7;
    assertCryptLength(fromMongo.get(MyBean.MONGO_SECRETSUBBEAN), expectedLength);
}
/** Return an object suitable to create a new user in MongoDB. */
public BasicDBObject getDBObject(boolean includeId) {
    BasicDBObject user = new BasicDBObject();
    if (includeId) {
        user.append(DB_ID, new ObjectId(id));
    }
    user.append(JSON_KEY_USER_FIRST_NAME, firstName);
    user.append(JSON_KEY_USER_LAST_NAME, lastName);
    user.append(JSON_KEY_USER_NAME, userName);
    user.append(JSON_KEY_USER_TWITTER_HANDLE, twitterHandle);
    user.append(JSON_KEY_USER_WISH_LIST_LINK, wishListLink);
    user.append(JSON_KEY_USER_PASSWORD_HASH, passwordHash);
    user.append(JSON_KEY_USER_PASSWORD_SALT, passwordSalt);
    user.append(JSON_KEY_USER_TWITTER_LOGIN, isTwitterLogin);
    return user;
}
@Test
public void shouldSaveSession() throws Exception {
    // given
    MongoSession session = new MongoSession();
    BasicDBObject dbSession = new BasicDBObject();
    given(this.converter.convert(session, TypeDescriptor.valueOf(MongoSession.class),
            TypeDescriptor.valueOf(DBObject.class))).willReturn(dbSession);
    given(this.mongoOperations.save(dbSession, "sessions")).willReturn(Mono.just(dbSession));

    // when: subscribe and wait for completion (the original StepVerifier was never triggered
    // because no terminal verify call was made)
    StepVerifier.create(this.repository.save(session))
            .verifyComplete();

    // then
    verify(this.mongoOperations).save(dbSession,
            ReactiveMongoOperationsSessionRepository.DEFAULT_COLLECTION_NAME);
}
@Override
public BasicDBObject mongoSerialise() {
    BasicDBObject dbObject = new BasicDBObject();
    dbObject.put("i", getObjectId());
    dbObject.put("t", ID);
    dbObject.put("x", getX());
    dbObject.put("y", getY());
    dbObject.put("direction", getDirection().ordinal());
    dbObject.put("heldItem", heldItem);
    dbObject.put("hp", hp);
    dbObject.put("action", lastAction.ordinal());
    dbObject.put("holo", hologram);
    dbObject.put("holoStr", hologramString);
    dbObject.put("holoMode", lastHologramMode.ordinal());
    dbObject.put("holoC", hologramColor);
    dbObject.put("energy", energy);
    if (parent != null) {
        dbObject.put("parent", parent.getUsername()); // Only used client-side for now
    }
    return dbObject;
}
public static DBObject toDbObject(NameValues nameValues) {
    final BasicDBObject basicDBObject = new BasicDBObject();
    nameValues.forEach(new NameValues.Foreach() {
        @Override
        public boolean forEach(String name, Object value) {
            basicDBObject.append(name, value);
            return true;
        }
    });
    return basicDBObject;
}
@SuppressWarnings("rawtypes") @Override public VirtualObject convert(DBObject source) { Integer rid = (Integer) source.get("rid"); Integer classId = (Integer) source.get("eClassId"); Long oid = (Long) source.get("oid"); Object featuresObject = source.get("features"); EClass eclass = platformService.getEClassForCid(classId.shortValue()); VirtualObject result = new VirtualObject(rid, classId.shortValue(), oid, eclass); if (featuresObject instanceof BasicDBObject) { Map map = (Map) featuresObject; processFeatures(map, result); } return result; }
/**
 * Saves an entry to the database.
 *
 * @param entry  the entry to persist
 * @param dbName usually "scraping"
 * @return true on success
 */
public static boolean saveEntry(DBEntry entry, String dbName) {
    if (entry == null || !entry.isValid()) {
        return false;
    }
    Logger log = Logger.getLogger(DAO.class);
    MongoDatabase db = MongoDB.INSTANCE.getDatabase(dbName);
    String collectionName = getCollectionName(entry);
    MongoCollection collection = db.getCollection(collectionName, BasicDBObject.class);
    try {
        collection.insertOne(entry);
        return true;
    } catch (MongoWriteException ex) {
        // Log everything except duplicate-key errors (code 11000)
        if (ex.getCode() != 11000) {
            log.error(ex.getError().getMessage());
        }
        return false;
    }
}
@SuppressWarnings({ "rawtypes", "unchecked" }) private List<Map> countAction(DataStoreMsg msg, Map queryparmes, MongoCollection<Document> collection) { BasicDBObject query = new BasicDBObject();// output Map findparmes = (Map) queryparmes.get(DataStoreProtocol.WHERE); QueryStrategy qry = new QueryStrategy(); Map express = new LinkedHashMap(); express.put(DataStoreProtocol.FIND, findparmes); qry.concretProcessor(DataStoreProtocol.FIND, express, query); // for (Object qobj : query.keySet()) { // log.info(this, "shell in package:" + qobj.toString() + ":" + query.get(qobj)); // } log.info(this, "MongoDBDataStore countAction toJson : " + query.toJson()); long countN = collection.count(query); Map<String, Object> item = new LinkedHashMap<String, Object>(); item.put(DataStoreProtocol.COUNT, countN); List<Map> res = new ArrayList<Map>(); res.add(item); return res; }
@Override
public void concretProcessor(Object key, Map elemData, List<Bson> list) {
    if (null == key && null == elemData) {
        list.add(new BasicDBObject("$project",
                new BasicDBObject("_id", 0).append(DataStoreProtocol.RESULT,
                        "$" + DataStoreProtocol.RESULT)));
    } else {
        Document filterBson = new Document();
        filterBson.append("_id", 0);
        String fields = (String) elemData.get(DataStoreProtocol.FIELDS);
        if (!StringHelper.isEmpty(fields)) {
            String[] filters = fields.split(";");
            for (String filter : filters) {
                filterBson.append(filter, 1);
            }
        }
        list.add(new BasicDBObject("$project", filterBson));
    }
}
public AgentConfigurationDatabase getAgentConfigurationByAgentId(String agentId) {
    System.out.println("Searching agent cfg in DB with agentId = " + agentId);
    logger.info("Searching agent cfg in DB with agentId = " + agentId);
    BasicDBObject query = new BasicDBObject();
    query.put("agentId", agentId);
    DBCursor cursor = getAgentConfigurationTable().find(query);
    if (cursor.hasNext()) {
        logger.info("Agent cfg exists in DB with agentId = " + agentId);
        return this.toAgentCfgDbObject(cursor.next());
    } else {
        logger.info("Agent cfg doesn't exist in DB with agentId = " + agentId);
        System.out.println("Agent cfg doesn't exist in DB with agentId = " + agentId);
        return null;
    }
}
public boolean existHost(String ipAddress) {
    logger.info("Verifying if host with ipAddress = " + ipAddress + " exists");
    System.out.println("Verifying if host with ipAddress = " + ipAddress + " exists");
    BasicDBObject query = new BasicDBObject();
    query.put("host", ipAddress);
    DBCursor cursor = getAgentTable().find(query);
    if (cursor.hasNext()) {
        logger.info("Host with ipAddress = " + ipAddress + " exists");
        System.out.println("Host with ipAddress = " + ipAddress + " exists");
        return true;
    } else {
        logger.info("No host exists with ipAddress = " + ipAddress);
        System.out.println("No host exists with ipAddress = " + ipAddress);
        return false;
    }
}
public AgentFull getAgentByIpAddress(String ipAddress) {
    logger.info("Searching host in DB with ipAddress = " + ipAddress);
    System.out.println("Searching host in DB with ipAddress = " + ipAddress);
    AgentFull agent = null;
    BasicDBObject query = new BasicDBObject();
    query.put("host", ipAddress);
    DBCursor cursor = getAgentTable().find(query);
    if (cursor.hasNext()) {
        agent = new AgentFull();
        agent.setAgentId((String) cursor.next().get("agentId"));
        agent.setHost((String) cursor.curr().get("host"));
        agent.setMonitored((boolean) cursor.curr().get("monitored"));
        agent.setLogstashIp((String) cursor.curr().get("logstashIp"));
        agent.setLogstashPort((String) cursor.curr().get("logstashPort"));
        logger.info("Host found in DB with ipAddress = " + ipAddress + " with ID " + agent.getAgentId());
        System.out.println("Host found in DB with ipAddress = " + ipAddress + " with ID " + agent.getAgentId());
    } else {
        logger.error("Host doesn't exist in DB with ipAddress = " + ipAddress);
        System.out.println("Host doesn't exist in DB with ipAddress = " + ipAddress);
        return null;
    }
    return agent;
}
public AgentFull getAgentByAgentId(String agentId) {
    System.out.println("Searching host in DB with agentId = " + agentId);
    logger.info("Searching host in DB with agentId = " + agentId);
    AgentFull agent = null;
    BasicDBObject query = new BasicDBObject();
    query.put("agentId", agentId);
    DBCursor cursor = getAgentTable().find(query);
    if (cursor.hasNext()) {
        agent = new AgentFull();
        agent.setAgentId((String) cursor.next().get("agentId"));
        agent.setHost((String) cursor.curr().get("host"));
        agent.setMonitored((boolean) cursor.curr().get("monitored"));
        agent.setLogstashIp((String) cursor.curr().get("logstashIp"));
        agent.setLogstashPort((String) cursor.curr().get("logstashPort"));
        logger.info("Host found in DB with agentId = " + agentId + " with ipAddress " + agent.getHost());
        System.out.println("Host found in DB with agentId = " + agentId + " with ipAddress " + agent.getHost());
    } else {
        logger.info("Host doesn't exist in DB with agentId = " + agentId);
        System.out.println("Host doesn't exist in DB with agentId = " + agentId);
        return null;
    }
    return agent;
}
@Test
public void testUploadFile() throws IOException {
    // Create the test file
    int someone = 1000;
    File uploadFile = FileUtils.createTestFile();

    // File entity
    FileEntity fileEntity = new FileEntity();
    fileEntity.setOwnerId(someone);

    // File metadata
    DBObject metaData = new BasicDBObject();
    metaData.put("info", "testUploadFile");

    // Upload the file
    int fileCountBeforeUpload = fileRepository.findAllByOwnerId(someone).size();
    boxService.upload(fileEntity, uploadFile, metaData);
    int fileCountAfterUpload = fileRepository.findAllByOwnerId(someone).size();
    Assert.assertEquals(fileCountBeforeUpload + 1, fileCountAfterUpload);

    fileRepository.deleteAllByOwnerId(someone);
    int fileCountAfterDelete = fileRepository.findAllByOwnerId(someone).size();
    Assert.assertEquals(fileCountAfterDelete, 0);

    // Remove the test file
    FileUtils.removeTestFile();
}
/**
 * Add a new group object to the database. Call PUT using the id of the new mongo object to update
 * the group information (name, members list, occasions list). Verify that the group information
 * has been updated.
 *
 * @throws GeneralSecurityException
 */
@Test
public void testUpdateGroup() throws IOException, GeneralSecurityException {
    System.out.println("\nStarting testUpdateGroup");

    // Create group in database
    Group group = new Group(null, "testGroup", new String[] {"12345", "23456"});
    BasicDBObject dbGroup = group.getDBObject(false);
    db.getCollection(Group.DB_COLLECTION_NAME).insert(dbGroup);
    group.setId(dbGroup.getObjectId(Group.DB_ID).toString());

    // Create updated group
    ObjectId groupId = dbGroup.getObjectId(Group.DB_ID);
    Group newGroup = new Group(groupId.toString(), "newTestGroup", new String[] {"12345"});
    String url = groupServiceURL + "/" + groupId;
    makeConnection("PUT", url, newGroup.getJson(), 200);

    // Verify that the new group information is in mongo
    BasicDBObject newDBGroup = (BasicDBObject) db.getCollection("groups").findOne(groupId);
    assertNotNull("Group testGroup was not found in the database.", newDBGroup);
    assertTrue("Group in database does not contain the expected data", newGroup.isEqual(newDBGroup));
}
private int InsertData(SQLInsertStatement state) {
    if (state.getValues().getValues().size() == 0) {
        throw new RuntimeException("number of columns error");
    }
    if (state.getValues().getValues().size() != state.getColumns().size()) {
        throw new RuntimeException("number of values and columns have to match");
    }
    SQLTableSource table = state.getTableSource();
    BasicDBObject o = new BasicDBObject();
    int i = 0;
    for (SQLExpr col : state.getColumns()) {
        o.put(getFieldName2(col), getExpValue(state.getValues().getValues().get(i)));
        i++;
    }
    DBCollection coll = this._db.getCollection(table.toString());
    coll.insert(new DBObject[] { o });
    return 1;
}
public MongoRecordReader(MongoSubScan.MongoSubScanSpec subScanSpec, List<SchemaPath> projectedColumns,
        FragmentContext context, MongoStoragePlugin plugin) {
    fields = new BasicDBObject();
    // Exclude the _id field, if not mentioned by the user.
    fields.put(DrillMongoConstants.ID, Integer.valueOf(0));
    setColumns(projectedColumns);
    fragmentContext = context;
    this.plugin = plugin;
    filters = new BasicDBObject();
    Map<String, List<BasicDBObject>> mergedFilters = MongoUtils.mergeFilters(
            subScanSpec.getMinFilters(), subScanSpec.getMaxFilters());
    buildFilters(subScanSpec.getFilter(), mergedFilters);
    enableAllTextMode = fragmentContext.getOptions()
            .getOption(ExecConstants.MONGO_ALL_TEXT_MODE).bool_val;
    readNumbersAsDouble = fragmentContext.getOptions()
            .getOption(ExecConstants.MONGO_READER_READ_NUMBERS_AS_DOUBLE).bool_val;
    init(subScanSpec);
}
/**
 * Create a Mongo DB Object based on the content of this group.
 *
 * @param includeId Whether to include this group's existing Mongo Object id in the DB Object.
 *                  If false, MongoDB will assign a new Object id on insert.
 * @return The Mongo DB Object based on the content of this group
 */
public BasicDBObject getDBObject(boolean includeId) {
    BasicDBObject group = new BasicDBObject();
    if (includeId) {
        group.append(DB_ID, new ObjectId(id));
    }
    group.append(JSON_KEY_GROUP_NAME, name);
    BasicDBList membersArray = new BasicDBList();
    for (int i = 0; i < members.length; i++) {
        membersArray.add(members[i]);
    }
    group.append(JSON_KEY_MEMBERS_LIST, membersArray);
    return group;
}
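// A brief usage sketch (mirrors the insert pattern used in the tests above); the db handle
// and the group values are illustrative only.
Group group = new Group(null, "testGroup", new String[] {"12345", "23456"});
BasicDBObject dbGroup = group.getDBObject(false);           // no _id yet; Mongo assigns one
db.getCollection(Group.DB_COLLECTION_NAME).insert(dbGroup);
group.setId(dbGroup.getObjectId(Group.DB_ID).toString());   // copy the generated id back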
/**
 * This test demonstrates usage of the {@code $comment} meta attribute via {@link Meta}. One can
 * also enable profiling using {@code --profile=2} when starting {@literal mongod}.
 * <p>
 * <strong>NOTE</strong>: Requires MongoDB v. 2.6.4+
 */
@Test
public void findByFirstnameUsingMetaAttributes() {
    // execute derived finder method just to get the comment in the profile log
    repository.findByFirstname(dave.getFirstname());

    // execute another finder without meta attributes that should not be picked up
    repository.findByLastname(dave.getLastname(), new Sort("firstname"));

    DBCursor cursor = operations.getCollection(ApplicationConfiguration.SYSTEM_PROFILE_DB)
            .find(new BasicDBObject("query.$comment", AdvancedRepository.META_COMMENT));
    while (cursor.hasNext()) {
        DBObject dbo = cursor.next();
        DBObject query = (DBObject) dbo.get("query");
        assertThat(query.containsField("$comment"), is(true));
    }
}
@Test
public void checkNonEncryptedMap() {
    MyBean bean = new MyBean();
    Map<String, MySubBean> map = new HashMap<>();
    map.put("one", new MySubBean("sky is blue", " earth is round"));
    map.put("two", new MySubBean("grass is green", "earth is flat"));
    bean.nonSensitiveMap = map;
    mongoTemplate.save(bean);

    MyBean fromDb = mongoTemplate.findOne(query(where("_id").is(bean.id)), MyBean.class);
    assertThat(fromDb.nonSensitiveMap.get("one").secretString, is(bean.nonSensitiveMap.get("one").secretString));
    assertThat(fromDb.nonSensitiveMap.get("one").nonSensitiveData, is(bean.nonSensitiveMap.get("one").nonSensitiveData));
    assertThat(fromDb.nonSensitiveMap.get("two").secretString, is(bean.nonSensitiveMap.get("two").secretString));
    assertThat(fromDb.nonSensitiveMap.get("two").nonSensitiveData, is(bean.nonSensitiveMap.get("two").nonSensitiveData));

    DBObject fromMongo = mongoTemplate.getCollection(MyBean.MONGO_MYBEAN)
            .find(new BasicDBObject("_id", new ObjectId(bean.id))).next();
    DBObject mapMongo = (DBObject) fromMongo.get(MyBean.MONGO_NONSENSITIVEMAP);
    DBObject oneMongo = (DBObject) mapMongo.get("one");
    DBObject twoMongo = (DBObject) mapMongo.get("two");

    assertThat(oneMongo.get(MySubBean.MONGO_NONSENSITIVEDATA), is(map.get("one").nonSensitiveData));
    assertThat(twoMongo.get(MySubBean.MONGO_NONSENSITIVEDATA), is(map.get("two").nonSensitiveData));
    assertCryptLength(oneMongo.get(MySubBean.MONGO_SECRETSTRING), map.get("one").secretString.length() + 12);
    assertCryptLength(twoMongo.get(MySubBean.MONGO_SECRETSTRING), map.get("two").secretString.length() + 12);
}
@Test
public void insert_test() throws Exception {
    final TestRunner runner = TestRunners.newTestRunner(new StoreInMongo());
    addMongoService(runner);
    runner.setProperty(MongoProps.DATABASE, MONGO_DATABASE_NAME);
    runner.setProperty(MongoProps.COLLECTION, "insert_test");
    runner.enqueue("{\"a\":\"a\"}".getBytes());
    runner.run();

    runner.assertTransferCount(AbstractMongoProcessor.REL_FAILURE, 0);
    runner.assertTransferCount(AbstractMongoProcessor.REL_SUCCESS, 1);

    // Verify wrapped payload
    MockFlowFile out = runner.getFlowFilesForRelationship(AbstractMongoProcessor.REL_SUCCESS).get(0);
    BasicDBObject actual = (BasicDBObject) JSON.parse(new String(out.toByteArray(), StandardCharsets.UTF_8));
    assertEquals("a", actual.getString("a"));
}
@Test
public void insert_refined_payload_test() throws Exception {
    final TestRunner runner = TestRunners.newTestRunner(new StoreInMongo());
    addMongoService(runner);
    runner.setProperty(MongoProps.DATABASE, MONGO_DATABASE_NAME);
    runner.setProperty(MongoProps.COLLECTION, "insert_test");

    String contents = FileUtils.readFileToString(Paths.get("src/test/resources/payload.json").toFile());
    runner.enqueue(contents.getBytes());
    runner.run();

    runner.assertTransferCount(AbstractMongoProcessor.REL_FAILURE, 0);
    runner.assertTransferCount(AbstractMongoProcessor.REL_SUCCESS, 1);

    // Verify wrapped payload
    MockFlowFile out = runner.getFlowFilesForRelationship(AbstractMongoProcessor.REL_SUCCESS).get(0);
    BasicDBObject actual = (BasicDBObject) JSON.parse(new String(out.toByteArray(), StandardCharsets.UTF_8));
    assertNotNull(actual.getString("d"));
}
public void getWeaverInfos(Weaver weaver) {
    BasicDBObject basicDB = new BasicDBObject();

    DBObject tempDB = weaverDao.getWeaverInfosInPost(weaver);
    if (tempDB != null) {
        basicDB.put("postCount", tempDB.get("postCount"));
        basicDB.put("push", tempDB.get("push"));
        basicDB.put("rePostCount", tempDB.get("rePostCount"));
    }

    tempDB = weaverDao.getWeaverInfosInRePost(weaver);
    if (tempDB != null) {
        basicDB.put("myRePostCount", tempDB.get("myRePostCount"));
        basicDB.put("rePostPush", tempDB.get("rePostPush"));
    }

    tempDB = weaverDao.getWeaverInfosInRepository(weaver);
    if (tempDB != null) {
        basicDB.put("repositoryPush", tempDB.get("repositoryPush"));
    }

    tempDB = weaverDao.getWeaverInfosInLecture(weaver);
    if (tempDB != null) {
        basicDB.put("repos", tempDB.get("repos"));
        basicDB.put("joinWeavers", tempDB.get("joinWeavers"));
    }

    tempDB = weaverDao.getWeaverInfosInCode(weaver);
    if (tempDB != null) {
        basicDB.put("codeCount", tempDB.get("codeCount"));
        basicDB.put("downCount", tempDB.get("downCount"));
    }

    weaver.setWeaverInfo(basicDB);
}
@Override
public BasicDBObject mongoSerialise() {
    BasicDBObject dbObject = new BasicDBObject();
    dbObject.put("i", getObjectId());
    dbObject.put("x", getX());
    dbObject.put("y", getY());
    dbObject.put("direction", getDirection().ordinal());
    dbObject.put("hp", getHp());
    // dbObject.put("energy", energy);
    dbObject.put("action", getAction().ordinal());
    dbObject.put("t", ID);
    return dbObject;
}
@GET @Path("/") @Produces(MediaType.APPLICATION_JSON) public Response getGroups(@QueryParam("userId") String userId) { // Validate the JWT. At this point, anyone can get a group list if they // have a valid JWT. try { validateJWT(); } catch (JWTException jwte) { return Response.status(Status.UNAUTHORIZED) .type(MediaType.TEXT_PLAIN) .entity(jwte.getMessage()) .build(); } DBCursor groupCursor = null; BasicDBList groupList = new BasicDBList(); if (userId != null) { if (!ObjectId.isValid(userId)) { return Response.status(Status.BAD_REQUEST) .type(MediaType.TEXT_PLAIN) .entity("The user id provided is not valid.") .build(); } BasicDBObject queryObj = new BasicDBObject(Group.JSON_KEY_MEMBERS_LIST, userId); groupCursor = getGroupCollection().find(queryObj); } else { groupCursor = getGroupCollection().find(); } while (groupCursor.hasNext()) { groupList.add((new Group(groupCursor.next()).getJson())); } String responsePayload = (new BasicDBObject(Group.JSON_KEY_GROUPS, groupList)).toString(); return Response.ok(responsePayload).build(); }
/**
 * Converts the columns of one binlog row into a DBObject.
 *
 * @param columns the binlog columns of a single row
 * @return a BasicDBObject keyed by column name
 */
public static BasicDBObject columnToJson(List<CanalEntry.Column> columns) {
    BasicDBObject obj = new BasicDBObject();
    for (CanalEntry.Column column : columns) {
        Object value = dataTypeConvert(column.getMysqlType(), column.getValue());
        obj.put(column.getName(), value);
    }
    return obj;
}
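// Hedged usage sketch: convert one binlog row and upsert it keyed on its primary-key column.
// The DBCollection handle, the rowData variable, and the "id" key name are assumptions made
// for illustration only.
BasicDBObject row = columnToJson(rowData.getAfterColumnsList());
BasicDBObject key = new BasicDBObject("id", row.get("id"));
collection.update(key, row, true /* upsert */, false /* multi */);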
public DBCursor selectSet(DBCollection collection) {
    DBObject fields = getFields();
    DBObject query = getQuery();
    DBObject orderByObject = getOrderByObject();
    DBCursor cursor = null;

    // Log the query components
    log(fields, query, orderByObject);

    if (null != query && null == fields) {
        cursor = collection.find(query);
    } else if (null == query && null != fields) {
        cursor = collection.find(new BasicDBObject(), fields);
    } else if (null != fields && null != query) {
        cursor = collection.find(query, fields);
    } else {
        cursor = collection.find();
    }

    if (null != orderByObject) {
        cursor.sort(orderByObject);
    }

    if (null != this.limit) {
        if (null == this.limit.getOffset()) {
            cursor.limit(this.limit.getRowCount());
        } else {
            cursor.limit(this.limit.getRowCount());
            cursor.skip(this.limit.getOffset());
        }
    }
    return cursor;
}
@Override
public ValidationResult validate(final String subject, final String value, final ValidationContext context) {
    String reason = null;
    try {
        Object root = JSON.parse(value);
        if (!(root instanceof BasicDBObject)) {
            reason = "not a valid JSON object";
        }
    } catch (Exception e) {
        // LOGGER.debug("not a valid JSON object", e);
        reason = "unable to parse JSON";
    }
    return new ValidationResult.Builder()
            .subject(subject)
            .input(value)
            .explanation(reason)
            .valid(reason == null)
            .build();
}
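// Illustrative inputs for the validator above: JSON.parse returns a BasicDBObject only for a
// top-level JSON object, so arrays and malformed input are rejected with the reasons shown.
Object ok  = JSON.parse("{\"a\": 1}");   // BasicDBObject -> valid
Object arr = JSON.parse("[1, 2, 3]");    // BasicDBList   -> "not a valid JSON object"
Object bad = JSON.parse("{oops");        // throws        -> "unable to parse JSON"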
@Override
public Sprint getSprintForId(String id, String collectorId) {
    Aggregation agg = newAggregation(
            match(where("sSprintID").is(id).and("collectorId").is(collectorId)),
            firstSprintFields(group("sSprintID"))
                    .push(new BasicDBObject(FEATURE_FIELDS)).as("features")
    );
    AggregationResults<Sprint> aggregate = mongoTemplate.aggregate(agg, "feature", Sprint.class);
    return aggregate.getUniqueMappedResult();
}
public DataAddress prefetchData(DataAddress givenAddress, ServerAddress destAddress) throws IOException {
    logger.info("Connecting to source database at " + givenAddress.hostname + ":" + givenAddress.port);
    ServerAddress givenServer = new ServerAddress(givenAddress.hostname, givenAddress.port);
    GridFS givenDatabase = connectToDatabase(givenServer);

    logger.info("Reading package " + givenAddress.ID + " from source database");
    GridFSDBFile givenPackage = givenDatabase.findOne(new BasicDBObject("_id", givenAddress.ID));
    ByteArrayOutputStream baos = new ByteArrayOutputStream((int) givenPackage.getLength());
    givenPackage.writeTo(baos);
    logger.info("Prefetched");

    GridFS destDatabase = connectToDatabase(destAddress);
    GridFSInputFile destPackage = destDatabase.createFile(baos.toByteArray());
    int newID = getNextId(destDatabase);
    logger.info("Got new id for prefetched package: " + newID);
    destPackage.put("_id", newID);
    destPackage.save();
    logger.info("Saved prefetched package to destination database");

    DataAddress ret = new DataAddress();
    ret.hostname = destAddress.getHost();
    ret.port = destAddress.getPort();
    ret.ID = newID;
    return ret;
}
@Override
public BasicDBObject mongoSerialise() {
    BasicDBObject dbObject = new BasicDBObject();
    dbObject.put("rwHeadTrack", rwHeadTrack);
    dbObject.put("memory", memory.mongoSerialise());
    return dbObject;
}
/**
 * Performs SQL-injection sanitization while building the final query object.
 */
@Override
public Object toFinalObject() throws ConditionException {
    DBObject baseObject = new BasicDBObject();
    for (int i = 0; i < size(); i++) {
        Operator operator = getOperator(i);
        put(operator, get(i), baseObject);
    }
    return baseObject;
}
@Override
public boolean deleteMessage(ObjectId conversationId, String messageId) {
    final Query query = new Query(Criteria.where("_id").is(conversationId));
    final Update update = new Update()
            .pull("messages", new BasicDBObject("_id", messageId))
            .currentDate("lastModified");
    final WriteResult result = mongoTemplate.updateFirst(query, update, Conversation.class);
    return result.getN() == 1;
}
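// For reference, the Update built above corresponds roughly to this raw update document
// (a sketch of what mongoTemplate sends; it is not used by the method itself):
//   { "$pull":        { "messages": { "_id": <messageId> } },
//     "$currentDate": { "lastModified": true } }
BasicDBObject rawUpdate = new BasicDBObject("$pull",
        new BasicDBObject("messages", new BasicDBObject("_id", messageId)))
        .append("$currentDate", new BasicDBObject("lastModified", true));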
public GridFSFile saveGlbFile(InputStream inputStream, String fileName, int rid, double lon, double lat) {
    DBObject metaData = new BasicDBObject("rid", rid);
    metaData.put(STORE_BIM_TYPE, STORE_GLB_FILE);
    metaData.put("lon", lon);
    metaData.put("lat", lat);
    return gridFsTemplate.store(inputStream, fileName, metaData);
}
public GridFSFile saveGlbOffline(InputStream inputStream, String fileName, Long id, double lon, double lat) {
    DBObject metaData = new BasicDBObject("offline", true);
    metaData.put("id", id);
    metaData.put(STORE_BIM_TYPE, STORE_GLB_FILE);
    metaData.put("lon", lon);
    metaData.put("lat", lat);
    return gridFsTemplate.store(inputStream, fileName, metaData);
}
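// Sketch of how the stored GLB files could later be looked up by their metadata (assuming
// Spring Data MongoDB 1.x, where GridFsTemplate.findOne returns a GridFSDBFile); the method
// name is hypothetical, gridFsTemplate is the same template used above.
public GridFSDBFile findGlbFileByRid(int rid) {
    return gridFsTemplate.findOne(Query.query(GridFsCriteria.whereMetaData("rid").is(rid)));
}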