The following 50 code examples, extracted from open-source Python projects, illustrate how to use pymongo.DESCENDING.
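Before the project snippets, here is a minimal, self-contained sketch of the two idioms that all of the examples below reduce to (the connection URI, database, collection, and field names are placeholders, not taken from any of the projects): passing pymongo.DESCENDING as the direction of a sort, and passing it as the direction of an index key.

import pymongo

# Connect to a local MongoDB instance (URI is illustrative only).
client = pymongo.MongoClient('mongodb://localhost:27017')
coll = client['demo_db']['events']

# 1) Sort a query: newest documents first by the 'created_at' field.
latest = coll.find().sort('created_at', pymongo.DESCENDING).limit(10)

# 2) Declare an index direction: a descending single-field index.
coll.create_index([('created_at', pymongo.DESCENDING)])

pymongo.DESCENDING is simply the integer -1, which is why it can also appear directly as the value inside an aggregation $sort stage, as one of the examples further down does.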
def index(self):
    """Get the list of objects.

    .. :quickref: File; Get the list of objects

    Response is paginated and will only contain 25 results. The most recent
    objects appear first.

    :query page: page number.
    :type page: int
    :>json list files: list of files (see :http:get:`/files/(id)` for details
        on the format of a file).
    """
    page = int(request.args.get('page', 1))
    files = current_user.files.find().sort('_id', DESCENDING).limit(PER_PAGE).skip((page - 1) * PER_PAGE)
    pagination = Pagination(page=page, per_page=PER_PAGE, total=files.count(),
                            css_framework='bootstrap3')
    files = {'files': clean_files(list(files))}
    return render(files, 'files/index.html', ctx={'data': files, 'pagination': pagination})
def get_last_oplog_timestamp(conn, db_name):
    start_time = time.time()
    oplog = conn.local.oplog.rs
    if not db_name:
        curr = oplog.find().sort('$natural', pymongo.DESCENDING).limit(1)
    else:
        # {'ns': {'$in': oplog_ns_set}}
        reg = "^" + db_name + r"\."
        curr = oplog.find({'ns': re.compile(reg)}).sort('$natural', pymongo.DESCENDING).limit(1)
    if curr.count(with_limit_and_skip=True) == 0:
        return None
    print_cost_time("get_last_oplog_timestamp ", start_time)
    return curr[0]['ts']
def get_mongos(self, force=False):
    if not force and self.mongos_db:
        return self.mongos_db
    elif self.db.is_mongos():
        return self.db
    else:
        db = self.connection['config']
        for doc in db.mongos.find().sort('ping', DESCENDING):
            try:
                mongos_uri = MongoUri(doc['_id'])
                logging.debug("Found cluster mongos: %s" % mongos_uri)
                self.mongos_db = DB(mongos_uri, self.config, False, 'nearest')
                logging.info("Connected to cluster mongos: %s" % mongos_uri)
                return self.mongos_db
            except DBConnectionError:
                logging.debug("Failed to connect to mongos: %s, trying next available mongos" % mongos_uri)
        raise OperationError('Could not connect to any mongos!')
def get_range_daily_data(self, code, start=None, end=None):
    print u'Fetching daily data for %s' % code
    # date open high close low volume amount
    df = fc.get_stock_daily_data(code, start, end)
    if df is None:
        return
    # df.to_csv(str(code) + "daily.csv")
    for row in range(0, df.shape[0]):
        # daily data
        dailybar = {
            'date': str(df.index[row].date()),
            'open': str(df.iat[row, 0]),
            'high': str(df.iat[row, 1]),
            'low': str(df.iat[row, 3]),
            'close': str(df.iat[row, 2]),
            'volume': str(df.iat[row, 4]),
            'amount': str(df.iat[row, 5])
        }
        try:
            self.Daily_Db[code].insert(dailybar)
        except:
            pass
    self.Daily_Db[code].ensure_index([('date', pymongo.DESCENDING)])
def getTaginfo(tag):
    '''Return the latest record and the anchor list for the given tag.'''
    if tag:
        taginfo = db['kindRecord']
        result = taginfo.find({'tag': tag}, {'_id': 0, 'date': 0}).sort(
            'date', pymongo.DESCENDING).limit(1)
        taganchor = db['Roominfo']
        anchors = taganchor.find(
            {'tag': tag}, {'_id': 0, 'img': 0, 'date': 0, 'tag': 0})
        if anchors.count() != 0:
            anchorsinfo = [{'anchor': item['anchor'],
                            'audience': item['audience'],
                            'roomid': item['roomid'],
                            'roomtitle': item['roomtitle']} for item in anchors]
            if result.count() != 0:
                return result[0], anchorsinfo
            else:
                return None
    else:
        return None
def findOne(db, resultantClass, **query):
    '''
    (pymongo.database.Database, MongoORM) -> MongoORM

    Creates a MongoORM directly from the Mongo database in db with query
    arguments in the resultantClass.
    '''
    objectData = db[resultantClass.collection].find_one(
        query,
        # in case of tiebreakers, get the newest one
        sort=[('_id', DESCENDING)]
    )
    if objectData:
        return resultantClass(
            db,
            resultantClass.collection,
            **objectData
        )

    # non-existent objectId
    raise KeyError('No such document in %s' % (
        str(db[resultantClass.collection])
    ))
def enhance_problems_for_admin(problems):
    for problem in problems:
        query = {
            'problem_id': problem['_id']
        }
        snapshot = _db.problem_ranking_snapshots.find_one(
            query, sort=[('snapshot_time', pymongo.DESCENDING)])
        if snapshot is None:
            problem["solution_count"] = 0
            problem["perfect_solution_count"] = 0
        else:
            problem["solution_count"] = len(snapshot["ranking"])
            perfect_solution_count = 0
            for solution in snapshot["ranking"]:
                if solution["resemblance_int"] == 1000000:
                    perfect_solution_count += 1
            problem["perfect_solution_count"] = perfect_solution_count
def open_spider(self, spider):
    logging.warning('Opening spider')
    try:
        self.client = pymongo.MongoClient(self.mongo_uri)
        self.db = self.client[self.mongo_db]
    except ValueError:
        logging.error('Failed to connect to MongoDB')
    # Create the collection and its indexes if it does not exist yet.
    if self.mongo_col not in self.db.collection_names():
        self.db[self.mongo_col].create_index(
            [('created_at', pymongo.DESCENDING)])
        self.db[self.mongo_col].create_index(
            [('admin', pymongo.ASCENDING)], sparse=True)
        self.db[self.mongo_col].create_index(
            [('price', pymongo.ASCENDING)], sparse=True)
        self.db[self.mongo_col].create_index(
            [('mblogid', pymongo.ASCENDING)], unique=True)
    else:
        # Otherwise remember the creation time of the most recent matching row.
        recent_row = list(self.db[self.mongo_col].find(
            {'title': {'$eq': None}},
            projection=['created_at'],
            limit=1,
            sort=[('created_at', pymongo.DESCENDING)]))
        if recent_row:
            self.recent = recent_row[0]['created_at']
            # Log the timestamp shifted to UTC+8.
            logging.warning("Most recent record was created at %s" % (
                self.recent + datetime.timedelta(hours=8)).__str__())
def open_spider(self, spider):
    logging.warning('Opening spider')
    try:
        self.client = pymongo.MongoClient(self.mongo_uri)
        self.db = self.client[self.mongo_db]
    except ValueError:
        logging.error('Failed to connect to MongoDB')
    # Create the collection and its indexes if it does not exist yet.
    if self.mongo_col not in self.db.collection_names():
        self.db[self.mongo_col].create_index(
            [('created_at', pymongo.DESCENDING)])
        self.db[self.mongo_col].create_index(
            [('admin', pymongo.ASCENDING)], sparse=True)
        self.db[self.mongo_col].create_index(
            [('price', pymongo.ASCENDING)], sparse=True)
        self.db[self.mongo_col].create_index(
            [('mblogid', pymongo.ASCENDING)], unique=True)
    else:
        # Otherwise remember the creation time of the most recent matching row.
        recent_row = list(self.db[self.mongo_col].find(
            {'title': {'$ne': None}},
            projection=['created_at'],
            limit=1,
            sort=[('created_at', pymongo.DESCENDING)]))
        if recent_row:
            self.recent = recent_row[0]['created_at']
            # Log the timestamp shifted to UTC+8.
            logging.warning("Most recent record was created at %s" % (
                self.recent + datetime.timedelta(hours=8)).__str__())
def open_spider(self, spider):
    logging.warning('Opening spider')
    try:
        self.client = pymongo.MongoClient(self.mongo_uri)
        self.db = self.client[self.mongo_db]
    except ValueError:
        logging.error('Failed to connect to MongoDB')
    # Create the collection and its indexes if it does not exist yet.
    if self.mongo_col not in self.db.collection_names():
        self.db[self.mongo_col].create_index(
            [('created_at', pymongo.DESCENDING)])
        self.db[self.mongo_col].create_index(
            [('admin', pymongo.ASCENDING)], sparse=True)
        self.db[self.mongo_col].create_index(
            [('price', pymongo.ASCENDING)], sparse=True)
        self.db[self.mongo_col].create_index(
            [('mblogid', pymongo.ASCENDING)], unique=True)
    else:
        # Otherwise remember the creation time of the most recent row.
        recent_row = list(self.db[self.mongo_col].find(
            projection=['created_at', '_id'],
            limit=1,
            sort=[('created_at', pymongo.DESCENDING)]))
        self.recent = recent_row[0]['created_at']
        # Log the timestamp shifted to UTC+8.
        logging.warning("Most recent record was created at %s" % (
            self.recent + datetime.timedelta(hours=8)).__str__())
def get_chat_message_list(user_id, skip=0, limit=None):
    '''Get the chat message list of the given user.'''
    user_dbref = DBRef(UserDocument.meta['collection'], ObjectId(user_id))
    query = {
        '$or': [{'sender': user_dbref}, {'recipient': user_dbref}]
    }
    cursor = ChatMessageDocument.find(query).sort(
        [('send_time', pymongo.DESCENDING)]
    ).skip(skip)
    if limit is not None:
        cursor = cursor.limit(limit)

    chat_message_list = yield ChatMessageDocument.to_list(cursor)
    chat_message_list = yield ChatMessageDocument.translate_dbref_in_document_list(
        chat_message_list)

    raise gen.Return(chat_message_list)
def get_history_messages(id_a, id_b, since):
    '''Get the history messages between two users.'''
    limit = setting.history_messages_number_per_time

    user_a = DBRef(UserDocument.meta['collection'], ObjectId(id_a))
    user_b = DBRef(UserDocument.meta['collection'], ObjectId(id_b))

    cursor = ChatMessageDocument.find(
        {'$or': [{'between': [user_a, user_b]},
                 {'between': [user_b, user_a]}],
         'send_time': {'$lt': since}}
    ).sort([('send_time', pymongo.DESCENDING)]).limit(limit)

    result = yield ChatMessageDocument.to_list(cursor)
    raise gen.Return(result[::-1])
def get_like_list(share_id, skip=0, limit=None):
    cursor = ShareLikeDocument.find({
        'share': DBRef(
            ShareDocument.meta['collection'], ObjectId(share_id)
        )
    }).sort([('like_time', pymongo.DESCENDING)]).skip(skip)
    if limit is not None:
        cursor = cursor.limit(limit)

    like_list = yield ShareLikeDocument.to_list(cursor)
    for like in like_list:
        like['liker'] = yield UserDocument.translate_dbref(like['liker'])

    raise gen.Return(like_list)
def get_topic_list_by_someone(author_id, skip=0, limit=None):
    '''Get the topics published by a given author.'''
    cursor = TopicDocument.find({
        'author': DBRef(
            UserDocument.meta['collection'], ObjectId(author_id)
        )
    }).sort([('publish_time', pymongo.DESCENDING)]).skip(skip)
    if limit is not None:
        cursor = cursor.limit(limit)

    topic_list = yield TopicDocument.to_list(cursor)
    for topic in topic_list:
        topic['author'] = yield UserDocument.translate_dbref(
            topic['author']
        )
        topic['last_comment'] = yield TopicCommentDocument.get_last_comment(
            topic['_id']
        )
        for i, node in enumerate(topic['nodes']):
            topic['nodes'][i] = yield NodeDocument.translate_dbref(node)

    raise gen.Return(topic_list)
def get_friend_list(user_id, skip=0, limit=None):
    '''Get the friend list of a user.

    :Parameters:
      - `user_id`: the user's id
    '''
    owner = DBRef(UserDocument.meta['collection'], ObjectId(user_id))
    cursor = FriendDocument.find({'owner': owner}).sort(
        [('be_time', pymongo.DESCENDING)]
    ).skip(skip)
    if limit is not None:
        cursor = cursor.limit(limit)

    friends = yield FriendDocument.to_list(cursor)
    friend_list = yield FriendDocument._gen_friend_list(
        friends, "friend"
    )

    raise gen.Return(friend_list)
def get_member(league_id, skip=0, limit=9):
    '''Get the member list of a league.'''
    cursor = LeagueMemberDocument.find({
        'league': DBRef(
            UserDocument.meta['collection'], ObjectId(league_id)
        )
    }).sort([('time', pymongo.DESCENDING)]).skip(skip).limit(limit)

    member_list = yield LeagueMemberDocument.to_list(cursor)
    for member in member_list:
        member['member'] = yield LeagueMemberDocument.translate_dbref(
            member['member']
        )

    raise gen.Return(member_list)
def select(self, count=None, conditions=None):
    if count:
        count = int(count)
    else:
        count = 0

    if conditions:
        conditions = dict(conditions)
        conditions_name = ['types', 'protocol']
        for condition_name in conditions_name:
            value = conditions.get(condition_name, None)
            if value:
                conditions[condition_name] = int(value)
    else:
        conditions = {}

    items = self.proxys.find(conditions, limit=count).sort(
        [("speed", pymongo.ASCENDING), ("score", pymongo.DESCENDING)])
    results = []
    for item in items:
        result = (item['ip'], item['port'], item['score'])
        results.append(result)
    return results
async def setup(self):
    """Set up the MongoDB collections, if they do not exist."""
    try:
        db = await self.db
        collections = await db.collection_names()
        created = False
        if self.table_name not in collections:
            # create table
            logger.info("Creating MongoDB collection [{}]".format(self.table_name))
            await db.create_collection(self.table_name)
            await db[self.table_name].create_index([("target_id", DESCENDING),
                                                    ("post_id", DESCENDING)])
            created = True
        # create control collection if not already created.
        if self.control_table_name not in collections:
            # create table
            logger.info("Creating MongoDB control data collection [{}]".format(self.control_table_name))
            await db.create_collection(self.control_table_name)
            created = True
        return created
    except Exception as exc:
        logger.error("[DB] Error when setting up MongoDB collections: {}".format(exc))
        return False
def fetch_existing_token_of_user(self, client_id, grant_type, user_id):
    data = self.collection.find_one({"client_id": client_id,
                                     "grant_type": grant_type,
                                     "user_id": user_id},
                                    sort=[("expires_at", pymongo.DESCENDING)])
    if data is None:
        raise AccessTokenNotFound

    return AccessToken(client_id=data.get("client_id"),
                       grant_type=data.get("grant_type"),
                       token=data.get("token"),
                       data=data.get("data"),
                       expires_at=data.get("expires_at"),
                       refresh_token=data.get("refresh_token"),
                       refresh_expires_at=data.get("refresh_expires_at"),
                       scopes=data.get("scopes"),
                       user_id=data.get("user_id"))
def getJudgementDetail():
    jd_collection = db.JudgmentDoc_isExactlySame
    query = jd_collection.find({}, {'_id': 0, 'Id': 1}).sort("Id", pymongo.ASCENDING)
    idList = list(query)
    length = len(idList)

    collection = db.JudgementDetail
    query = collection.find({}, {'_id': 0, 'Id': 1}).sort("_id", pymongo.DESCENDING).limit(3)
    record_id = list(query)
    print(record_id)

    ii = idList.index(record_id[0])
    for i in range(ii + 1, ii + 30000):
        print("%d/%d\t%s" % (i, length, idList[i]['Id']))
        ret = JudgementDetail(idList[i]['Id'])
        if ret in err_code:
            print('err_code: %s' % ret)
            break
    return ret
def getJudgementDetail():
    jd_collection = db.JudgmentDoc_isExactlySame
    query = jd_collection.find({}, {'_id': 0, 'Id': 1}).sort("Id", pymongo.ASCENDING)
    idList = list(query)
    length = len(idList)

    collection = db.JudgementDetail
    query = collection.find({}, {'_id': 0, 'Id': 1}).sort("_id", pymongo.DESCENDING).limit(10)
    record_id = list(query)

    ii = 0
    for id in idList:  # [:10]:
        ii = ii + 1
        print("%d/%d\t%s" % (ii, length, id['Id']))
        ret = JudgementDetail(id['Id'])
        if ret in ['101', '102', '103', '104', '105', '107', '108', '109', '110', '199']:
            break
def ensure_index(cls):
    super().ensure_index()

    if not cls.COLLECTION_NAME:
        return

    collection = cls.collection()
    collection.create_index(
        [
            ("is_latest", pymongo.DESCENDING)
        ],
        name="index_latest",
        partialFilterExpression={"is_latest": True}
    )
    collection.create_index(
        [
            ("model_id", pymongo.ASCENDING),
            ("version", pymongo.ASCENDING)
        ],
        name="index_unique_version",
        unique=True
    )
def get_history(asset_id, document_limit=10):
    try:
        cursor = app.config['DB_COLL_HISTORY'].find(
            {'asset_id': asset_id}
        ).sort(
            "timestamp", pymongo.DESCENDING
        ).limit(int(document_limit))
    except pymongo.errors.AutoReconnect as e:
        logger.error(e.message)
    except Exception as e:
        logger.error(e.message)

    history = []
    for document in cursor:
        doc = utilities.to_isoformat_datetime(document)
        history.append(doc)
    return dumps(history)
def setWeiboMongo(rst):
    db = get_db()
    cl = db['retweet']
    try:
        maxMid = cl.find().sort("mid", pymongo.DESCENDING)[0]['mid']
    except:
        maxMid = '0'
    coll = []
    for c in rst:
        dt = {}
        cm = re.findall('comment_txt.*?/p', c)[0]
        dt['mid'] = returnMid(c)
        if dt['mid'] <= maxMid:
            break
        dt['url'] = returnUrl(c)
        dt['friend'] = returnFri(cm)
        dt['retweet'] = 0
        coll.append(dt)
    if coll:
        cid = cl.insert(coll)
        print('insert', len(coll), ' retweet document!')
    return coll
def surialert(request, task_id):
    report = results_db.analysis.find_one({"info.id": int(task_id)},
                                          {"suricata.alerts": 1},
                                          sort=[("_id", pymongo.DESCENDING)])
    if not report:
        return render(request, "error.html",
                      {"error": "The specified analysis does not exist"})

    suricata = report["suricata"]

    if settings.MOLOCH_ENABLED:
        if settings.MOLOCH_BASE[-1] != "/":
            settings.MOLOCH_BASE = settings.MOLOCH_BASE + "/"
        suricata = gen_moloch_from_suri_alerts(suricata)

    return render(request, "analysis/surialert.html",
                  {"analysis": report, "config": enabledconf})
def surihttp(request, task_id):
    report = results_db.analysis.find_one({"info.id": int(task_id)},
                                          {"suricata.http": 1},
                                          sort=[("_id", pymongo.DESCENDING)])
    if not report:
        return render(request, "error.html",
                      {"error": "The specified analysis does not exist"})

    suricata = report["suricata"]

    if settings.MOLOCH_ENABLED:
        if settings.MOLOCH_BASE[-1] != "/":
            settings.MOLOCH_BASE = settings.MOLOCH_BASE + "/"
        suricata = gen_moloch_from_suri_http(suricata)

    return render(request, "analysis/surihttp.html",
                  {"analysis": report, "config": enabledconf})
def suritls(request, task_id):
    report = results_db.analysis.find_one({"info.id": int(task_id)},
                                          {"suricata.tls": 1},
                                          sort=[("_id", pymongo.DESCENDING)])
    if not report:
        return render(request, "error.html",
                      {"error": "The specified analysis does not exist"})

    suricata = report["suricata"]

    if settings.MOLOCH_ENABLED:
        if settings.MOLOCH_BASE[-1] != "/":
            settings.MOLOCH_BASE = settings.MOLOCH_BASE + "/"
        suricata = gen_moloch_from_suri_tls(suricata)

    return render(request, "analysis/suritls.html",
                  {"analysis": report, "config": enabledconf})
def surifiles(request, task_id):
    report = results_db.analysis.find_one({"info.id": int(task_id)},
                                          {"info.id": 1, "suricata.files": 1},
                                          sort=[("_id", pymongo.DESCENDING)])
    if not report:
        return render(request, "error.html",
                      {"error": "The specified analysis does not exist"})

    suricata = report["suricata"]

    if settings.MOLOCH_ENABLED:
        if settings.MOLOCH_BASE[-1] != "/":
            settings.MOLOCH_BASE = settings.MOLOCH_BASE + "/"
        suricata = gen_moloch_from_suri_file_info(suricata)

    return render(request, "analysis/surifiles.html",
                  {"analysis": report, "config": enabledconf})
async def topcookies(cmd, message, args):
    all_cookies = cmd.db[cmd.db.db_cfg.database].Cookies.find({}).sort('Cookies', pymongo.DESCENDING).limit(20)
    cookie_count = cmd.db[cmd.db.db_cfg.database].Cookies.aggregate(
        [{'$group': {
            '_id': 'cookie_counter_cursor',
            'cookie_count': {'$sum': '$Cookies'}
        }}]
    )
    cookie_count = list(cookie_count)
    cookie_count = cookie_count[0]['cookie_count']
    cookie_list = []
    for cookie_file in all_cookies:
        user = discord.utils.find(lambda x: x.id == cookie_file['UserID'], cmd.bot.get_all_members())
        if user:
            unam = user.name
        else:
            unam = '{Unknown}'
        cookie_list.append([unam, cookie_file['Cookies']])
    cookie_table = boop(cookie_list, ['User', 'Cookies'])
    top_text = f'A total of {cookie_count} cookies have been given.'
    response = discord.Embed(color=0xd99e82)
    response.add_field(name='Cookie Count', value=top_text, inline=False)
    response.add_field(name='Cookie Leaderboard', value=f'```bat\n{cookie_table}\n```', inline=False)
    await message.channel.send(embed=response)
def initialize_indexes(database):
    """Ensure the necessary indexes exist."""
    submissions = database['submissions']
    comments = database['comments']

    index_id = pymongo.IndexModel('reddit_id')
    index_created = pymongo.IndexModel([('created', pymongo.DESCENDING)])
    index_text_title_and_body = pymongo.IndexModel([('title', pymongo.TEXT),
                                                    ('body', pymongo.TEXT)])
    index_text_body = pymongo.IndexModel([('body', pymongo.TEXT)])

    submissions.create_indexes([index_id, index_created, index_text_title_and_body])
    comments.create_indexes([index_id, index_created, index_text_body])
async def command_stats(self, ctx, limit: int = 10):
    """Show most used commands."""
    if limit > 20 or limit < 1:
        await ctx.send('no')
        return

    cur = self.cstats_coll.find().sort('uses', direction=pymongo.DESCENDING)\
        .limit(limit)

    res = []
    async for single in cur:
        if single.get('t'):
            continue
        name, uses = single['name'], single['uses']
        res.append(f'{name}: used {uses} times')

    _res = '\n'.join(res)
    await ctx.send(f'```\n{_res}\n```')
def load_current_state(self):
    self.known = {}
    blocks = store.config_blocks.find({'botnet': self.label})
    # First, we have to get each config block 'type' known
    for type in blocks.distinct('type'):
        self.known[type] = {}
        # This could probably be done in just one (complex) query
        targets = store.config_blocks.find({
            'botnet': self.label,
            'type': type
        }).distinct('target')
        for target in targets:
            last_known = store.config_blocks.find({
                'botnet': self.label,
                'type': type,
                'target': target,
            }).sort('updated', DESCENDING).limit(1)[0]
            if last_known['action'] != ACTION_REMOVED:
                self.known[type][target] = ConfigBlock(last_known)
def post():
    title = "Search results"
    if request.method == 'POST':
        co = conect_db()
        name = request.form['name']
        name = re.compile('.*{0}.*'.format(name))
        result = []
        if not request.form['name']:
            return render_template('index.html')
        for i, data in enumerate(co.find({'$or': [{'name': name},
                                                  {'area': name},
                                                  {'aliases.name': name},
                                                  {'tags.value': name}]},
                                         sort=[('rating.count', pymongo.DESCENDING),
                                               ('rating.value', pymongo.DESCENDING)])):
            if i < 100:
                result.append(data)
            else:
                i = 99
                break
        return render_template('index.html', result=result, title=title,
                               name=request.form['name'], num=i + 1)
    else:
        return redirect(url_for('index'))
def order_by_recorded_msg_count(self, limit=None):
    """Sort by COUNT OF RECEIVED MESSAGES"""
    RECEIVED_MESSAGES = 'rcv_sum'
    aggr_pipeline = [
        {'$addFields': {
            RECEIVED_MESSAGES: {
                '$sum': ['$' + group_data.MESSAGE_RECORDS + '.' + msg_stats_data.RECEIVE + '.' + str(type_enum) + '.' + k
                         for k in (msg_stats_pair.TRIGGERED, msg_stats_pair.NOT_TRIGGERED)
                         for type_enum in list(msg_type)]
            }
        }},
        {'$sort': {
            RECEIVED_MESSAGES: pymongo.DESCENDING
        }}
    ]

    if limit is not None and isinstance(limit, (int, long)):
        aggr_pipeline.append({'$limit': limit})

    aggr_result = list(self.aggregate(aggr_pipeline))
    if len(aggr_result) > 0:
        return [group_data(data) for data in aggr_result]
    else:
        return []

# private
def find_one_and_update(self, filter, update, projection=None, sort=None, upsert=False,
                        return_document=pymongo.ReturnDocument.BEFORE, **kwargs):
    if self._available_range == group_dict_manager_range.GLOBAL:
        pass
    elif self._available_range == group_dict_manager_range.GROUP_AND_PUBLIC:
        or_list = [{pair_data.AFFILIATED_GROUP: self._group_id},
                   {pair_data.AFFILIATED_GROUP: PUBLIC_GROUP_ID}]
        if '$or' in filter:
            filter['$and'] = [{'$or': or_list}, {'$or': filter['$or']}]
            del filter['$or']
        else:
            filter['$or'] = or_list
    elif self._available_range == group_dict_manager_range.GROUP_ONLY:
        filter[pair_data.AFFILIATED_GROUP] = self._group_id
    else:
        raise UnknownRangeError()

    sort_tuple = (pair_data.AFFILIATED_GROUP, pymongo.DESCENDING)
    if sort is None:
        sort = [sort_tuple]
    else:
        sort.append(sort_tuple)

    return super(group_dict_manager, self).find_one_and_update(filter, update, projection,
                                                               sort, upsert, return_document,
                                                               **kwargs)
def get_reply_data(self, keyword, kw_type=word_type.TEXT):
    """Return None if nothing is found, else return the result as a pair_data instance"""
    data_result = self.find_one({
        pair_data.KEYWORD: keyword,
        pair_data.PROPERTIES + '.' + pair_data.DISABLED: False,
        pair_data.PROPERTIES + '.' + pair_data.KEYWORD_TYPE: int(kw_type)
    }, sort=[(pair_data.PROPERTIES + '.' + pair_data.PINNED, pymongo.DESCENDING),
             (pair_data.SEQUENCE, pymongo.DESCENDING)])

    if data_result is not None:
        data_result = pair_data(data_result)
        if data_result.last_call is None or \
                data_result.last_call < datetime.now() - timedelta(seconds=self._repeat_call_cd_secs):
            self.update({pair_data.SEQUENCE: data_result.seq_id},
                        {'$inc': {pair_data.STATISTICS + '.' + pair_data.CALLED_COUNT: 1},
                         '$set': {pair_data.STATISTICS + '.' + pair_data.LAST_CALL: datetime.now()}})
        return pair_data(data_result)
def query(self, q_kw=None, fields=None, sort_by=None, use_iterators=True, *args, **kwargs):
    """
    Find a set of documents matching a condition.

    Sometimes, because of memory pressure, this may raise a `MemoryError` exception.
    """
    if not isinstance(sort_by, (tuple, types.NoneType)):
        raise TypeError('meth: query, `sort_by` keyword type error')
    if not isinstance(fields, dict):
        raise TypeError('meth: query, `fields` keyword type error')

    self.__connect()
    skip = kwargs.pop('skip', 0)
    limit = kwargs.pop('limit', 0)  # 0 hints to fetch all documents
    args = (q_kw, ) + args
    kwargs['projection'] = fields
    sort_by = sort_by or [('_id', pymongo.DESCENDING)]
    cursor = self.__collection.find(*args, **kwargs).sort(sort_by)
    if use_iterators:
        return cursor.skip(skip).limit(limit)
    return [doc for doc in cursor]
def bulk_insert_cves(self, cve_list):
    products = []
    for product in cve_list:
        splitted_product = product.split("#")
        data = {}
        data['cve_id'] = splitted_product[0]
        data['vendor'] = splitted_product[1]
        data['product'] = splitted_product[2]
        data['version'] = splitted_product[3]
        data['year'] = int(splitted_product[4])
        products.append(data)

    # Bulk insert
    self.db.cve.create_index([('product', pymongo.DESCENDING)])
    self.db.cve.insert_many(products)

# Bulk insert the cve info dict format
def get_api_exceptions(result_limit=50, sort_direction=pymongo.DESCENDING):
    """
    Retrieve api exceptions.

    Args:
        result_limit: the maximum number of exceptions to return.
        sort_direction: pymongo.ASCENDING or pymongo.DESCENDING
    """
    db = api.common.get_conn()
    results = db.exceptions.find({"visible": True}).sort([("time", sort_direction)]).limit(result_limit)
    return list(results)
def insert_proxy(self, table_name, proxy):
    data = proxy.get_dict()
    data['save_time'] = str(time.time())
    data['create_time'] = str(datetime.datetime.now())
    self.db[table_name].create_index([("ip", pymongo.DESCENDING)], unique=True)
    try:
        self.db[table_name].insert(data)
    except BaseException, e:
        if "E11000 duplicate key error collection" in e.message:
            utils.log(str(e.message.split(' ')[12]) + " is a duplicate proxy, skipped")
def get_oplog_tail_ts(self):
    logging.debug("Gathering oldest 'ts' in %s oplog" % self.uri)
    return self.get_oplog_rs().find_one(sort=[('$natural', DESCENDING)])['ts']
def last_updated(self):
    doc = next(self.query(properties=[self.lu_field]).sort(
        [(self.lu_field, pymongo.DESCENDING)]).limit(1), None)
    # Handle when collection has docs but `NoneType` lu_field.
    return (self.lu_func[0](doc[self.lu_field])
            if (doc and doc[self.lu_field])
            else datetime.min)
def get_all(self):
    return self.user_meta.find().sort([('version', pymongo.DESCENDING)])
def get_all(self):
    return self.rating_meta.find().sort([('version', pymongo.DESCENDING)])
def get_all(self):
    return self.item_meta.find().sort([('version', pymongo.DESCENDING)])
def search_one(self, query, split_regexp, key):
    query = re.split(split_regexp, query)
    # print("Query: ", query)
    if len(query) > 7:
        return None

    dbs = []
    if "cpp" in query:
        dbs = [db_cpp]
        query.remove("cpp")
    elif "c++" in query:
        dbs = [db_cpp]
        query.remove("c++")
    elif "python" in query:
        dbs = [db_python3]
        query.remove("python")
    elif "python3" in query:
        dbs = [db_python3]
        query.remove("python3")
    else:
        dbs = [db_cpp, db_python3]

    # print("Query: '", query, "'")
    query = filter(lambda x: x, query)
    if key == "name":
        query = sorted(query)
    query = " ".join(query)
    # print("Query: '", query, "'")
    # print("dbs: ", str(dbs))

    found = False
    for db in dbs:
        # print("db: ", str(db))
        cursor = db.index.find({key: query}, sort=[("relevance", pymongo.DESCENDING)])
        for doc in cursor:
            found = True
            need_continue = self.found_reference(db, doc)
            if not need_continue:
                break
    return found
def get_all_team_scores():
    """
    Gets the score for every team in the database.

    Returns:
        A list of dictionaries with name and score
    """
    teams = api.team.get_all_teams()
    db = api.api.common.get_conn()

    result = []
    for team in teams:
        team_query = db.submissions.find({'tid': team['tid'], 'eligible': True, 'correct': True})
        if team_query.count() > 0:
            lastsubmit = team_query.sort('timestamp', direction=pymongo.DESCENDING)[0]['timestamp']
        else:
            lastsubmit = datetime.datetime.now()
        score = get_score(tid=team['tid'])
        if score > 0:
            result.append({
                "name": team['team_name'],
                "tid": team['tid'],
                "school": team["school"],
                "score": score,
                "lastsubmit": lastsubmit
            })
    time_ordered = sorted(result, key=lambda entry: entry['lastsubmit'])
    time_ordered_time_removed = [{'name': x['name'],
                                  'tid': x['tid'],
                                  'school': x['school'],
                                  'score': x['score']} for x in time_ordered]
    return sorted(time_ordered_time_removed, key=lambda entry: entry['score'], reverse=True)
def ensure_indexes(self):
    # Ensure index existence
    db = self.client.bitcoin
    collection = db.transactions
    collection.create_index([("source_n_id", ASCENDING)])
    collection.create_index([("destination_n_id", ASCENDING)])
    collection.create_index([("source", ASCENDING)])
    collection.create_index([("destination", ASCENDING)])
    collection.create_index([("block_id", DESCENDING)])
def last_notifications(self, last_n=5):
    return self.collection.find().sort(
        'offset', pymongo.DESCENDING)[:last_n]