We extracted the following 50 code examples from open source Python projects to illustrate how to use google.appengine.api.memcache.set().
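In all of the examples below, the core call is memcache.set(key, value, time=expiry_seconds), which returns True on success. A minimal standalone sketch for orientation (the key and value here are illustrative only, not taken from any project below):

import logging

from google.appengine.api import memcache

# Cache a value for ten minutes; set() returns False if the write failed.
if not memcache.set('greeting', 'hello world', time=600):
    logging.error('memcache set failed')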
def save_auth_tokens(token_dict, user=None):
    """Associates the tokens with the current user and writes to the datastore.

    If there is no current user, the tokens are not written and this function
    returns None.

    Returns:
        The key of the datastore entity containing the user's tokens, or None
        if there was no current user.
    """
    if user is None:
        user = users.get_current_user()
    if user is None:
        return None
    memcache.set('gdata_pickled_tokens:%s' % user, pickle.dumps(token_dict))
    user_tokens = TokenCollection.all().filter('user =', user).get()
    if user_tokens:
        user_tokens.pickled_tokens = pickle.dumps(token_dict)
        return user_tokens.put()
    else:
        user_tokens = TokenCollection(
            user=user,
            pickled_tokens=pickle.dumps(token_dict))
        return user_tokens.put()
def load_auth_tokens(user=None):
    """Reads a dictionary of the current user's tokens from the datastore.

    If there is no current user (a user is not signed in to the app) or the
    user does not have any tokens, an empty dictionary is returned.
    """
    if user is None:
        user = users.get_current_user()
    if user is None:
        return {}
    pickled_tokens = memcache.get('gdata_pickled_tokens:%s' % user)
    if pickled_tokens:
        return pickle.loads(pickled_tokens)
    user_tokens = TokenCollection.all().filter('user =', user).get()
    if user_tokens:
        memcache.set('gdata_pickled_tokens:%s' % user,
                     user_tokens.pickled_tokens)
        return pickle.loads(user_tokens.pickled_tokens)
    return {}
def _update_memcached(cls, domain, time=3600 * 24, records=None):
    """
    Updates memcached with the latest data from the datastore and returns
    that data. By default stores entries to expire after 24 hours.
    """
    namespace = "{}|".format(domain)
    if not records:
        records = cls._get_from_datastore(domain, cls._memcache_date_offset)
    memcache.set(
        key=cls._memcache_key,
        namespace=namespace,
        value=json.dumps(records),
        time=time)
    return records
def update(cls, domain, time=0):
    """
    Updates the memcached stats for a given domain. This is used when a
    report is updated so that memcached has the current stats.

    Args:
        domain - str - The domain to use for the namespace
        time - int - The timeout for stored keys (default: 0, no expiration)
    """
    namespace = "{}|{}".format('stats', domain)
    for status in VALID_STATUSES:
        count = EmailReport.query(
            EmailReport.reported_domain == domain,
            EmailReport.status == status).count()
        memcache.set(key=status, namespace=namespace, value=count, time=time)
def update(cls, domain, time=0):
    """
    Updates the memcached stats for a given domain. This is used when a
    report is updated so that memcached has the current stats.

    Args:
        domain - str - The domain to use for the namespace
        time - int - The timeout for stored keys (default: 0, no expiration)
    """
    namespace = "{}|".format(domain)
    records = cls._get_from_datastore(domain, cls._memcache_result_count)
    memcache.set(
        key=cls._memcache_key,
        namespace=namespace,
        value=json.dumps(records),
        time=time)
def get_by_sid(cls, sid):
    """Returns a ``Session`` instance by session id.

    :param sid:
        A session id.
    :returns:
        An existing ``Session`` entity.
    """
    data = memcache.get(sid)
    if not data:
        session = model.Key(cls, sid).get()
        if session:
            data = session.data
            memcache.set(sid, data)
    return data
def new_crash_with_backoff(cls, crash_report):
    """
    There is a chance that we get a new crash before an issue has been
    submitted for it.
    """
    backoff_cache_key = cls.backoff_crash_key_new_crash(crash_report)
    backoff_value = memcache.get(backoff_cache_key)
    if not backoff_value:
        # A task does not exist. Queue a job.
        memcache.set(backoff_cache_key, "in_progress")
        deferred.defer(
            GithubOrchestrator.create_issue_job,
            crash_report.fingerprint,
            _queue=GithubOrchestrator.__QUEUE__)
        logging.info(
            'Enqueued job for new issue on GitHub for fingerprint {0}'.format(
                crash_report.fingerprint))
    else:
        # task already in progress, back off
        logging.info(
            'A GitHub task is already in progress. Waiting for the dust to '
            'settle for fingerprint {0}'.format(crash_report.fingerprint))
def new_comment_with_backoff(cls, crash_report):
    """
    There is a chance that this is a hot issue and that too many crashes are
    coming in. Try to use backoff when posting a new comment.
    """
    backoff_cache_key = cls.backoff_crash_key_new_comment(crash_report)
    backoff_value = memcache.get(backoff_cache_key)
    if not backoff_value:
        # A task does not exist. Queue a job.
        memcache.set(backoff_cache_key, "in_progress")
        deferred.defer(
            GithubOrchestrator.add_comment_job,
            crash_report.fingerprint,
            _queue=GithubOrchestrator.__QUEUE__)
        logging.info(
            'Enqueued job for new comment on GitHub for fingerprint {0}'.format(
                crash_report.fingerprint))
    else:
        # task already in progress, back off
        logging.info(
            'A GitHub task is already in progress. Waiting for the dust to '
            'settle for fingerprint {0}'.format(crash_report.fingerprint))
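Both backoff helpers above share a get-then-set window: two concurrent requests can each see a missing key and both enqueue a job. A tighter variant (a sketch, not the project's actual code; the function name and the 300-second expiry are assumptions) uses memcache.add(), which is atomic and returns False when the key already exists, so only one caller wins:

def new_crash_with_backoff_atomic(cls, crash_report):
    # Hypothetical variant of new_crash_with_backoff above. add() only
    # succeeds for the first concurrent caller; the expiry ensures a
    # crashed worker cannot hold the backoff key forever.
    backoff_cache_key = cls.backoff_crash_key_new_crash(crash_report)
    if memcache.add(backoff_cache_key, "in_progress", time=300):
        deferred.defer(
            GithubOrchestrator.create_issue_job,
            crash_report.fingerprint,
            _queue=GithubOrchestrator.__QUEUE__)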
def _most_recent_property(
        cls, name, property_name, default_value=None,
        serialize=lambda x: x, deserialize=lambda x: x, ttl=120):
    cache_key = CrashReport.recent_crash_property_key(name, property_name)
    most_recent_value = memcache.get(cache_key)
    if most_recent_value is None:
        most_recent = 0
        most_recent_value = default_value
        q = CrashReport.all()
        q.filter('name = ', name)
        for entity in q.run():
            in_millis = to_milliseconds(entity.date_time)
            if most_recent <= in_millis:
                most_recent = in_millis
                most_recent_value = serialize(getattr(entity, property_name))
        memcache.set(cache_key, most_recent_value, ttl)
    to_return = deserialize(most_recent_value)
    return to_return
def getAddress(longitude, latitude):
    gps_key = "gps-" + str(longitude) + "," + str(latitude)
    resultData = memcache.get(key=gps_key)
    if resultData is None:
        url = ("https://maps.googleapis.com/maps/api/geocode/json"
               "?language=ja&sensor=false&key=" + const.GOOGLE_API_KEY +
               "&latlng=" + str(longitude) + "," + str(latitude))
        logging.debug(url)
        result = urlfetch.fetch(url=url, method=urlfetch.GET, headers={})
        # The original logged result.content identically for both success
        # and failure status codes.
        logging.debug(result.content)
        jsonobj = json.loads(result.content)
        if len(jsonobj["results"]) > 0:
            memcache.set(key=gps_key, value=jsonobj, time=3600)
            resultData = jsonobj
        else:
            # No geocoding result; return early instead of dereferencing
            # None below.
            logging.debug(resultData)
            return None
    return resultData["results"]
def createUserData(lineId):
    id = lineId
    # entity = Entity('UserData', name=id)
    # entity.update({
    #     'registrationTime': datetime.datetime.now().strftime("%Y/%m/%d %H:%M:%S"),
    #     'lineId': 'dummy-line-id',
    #     'dashId': id,
    # })
    # datastore.Put(entity)
    userData = UserData(
        key_name=id,
        lineId=id,
        dashId='not_registered_yet',
        registrationTime=datetime.datetime.now().strftime("%Y/%m/%d %H:%M:%S"),
        message=u'??????????????',
    )
    userData.put()
    _setCurrentUser(id)
    # memcache.set(key="USER-" + id, value="DUMMY")
def getSkel(self):
    """
    Returns a matching :class:`server.db.skeleton.Skeleton` instance for
    the current query.

    It's only possible to use this function if this query has been created
    using :func:`server.skeleton.Skeleton.all`.

    :returns: The Skeleton or None if the result-set is empty.
    :rtype: :class:`server.skeleton.Skeleton`
    """
    if self.srcSkel is None:
        raise NotImplementedError("This query has not been created using skel.all()")
    res = self.get()
    if res is None:
        return None
    # s = self.srcSkel.clone()
    self.srcSkel.setValues(res, key=res.key())
    return self.srcSkel
def __setitem__(self, name, value):
    """
    Implements the [] operator. Used to set property value(s).

    :param name: Name of the property to set.
    :type name: str
    :param value: Any value to set to the property.
    :raises: :exc:`BadPropertyError` if the property name is the empty
        string or not a string.
    :raises: :exc:`BadValueError` if the value is not a supported type.
    """
    if isinstance(value, (list, tuple)):
        # We can't store an empty list, so we catch any attempts and store
        # None. As "does not exist" queries aren't possible anyway, this
        # makes no difference.
        if len(value) == 0:
            value = None
    super(Entity, self).__setitem__(name, value)
def set(self, key, value, indexed=True):
    """
    Sets a property.

    :param key: Key of the property to set.
    :type key: str
    :param value: Any value to set to the property.
    :param indexed: Defines if the value is indexed.
    :type indexed: bool
    :raises: :exc:`BadPropertyError` if the property name is the empty
        string or not a string.
    :raises: :exc:`BadValueError` if the value is not a supported type.
    """
    if not indexed:
        unindexed = list(self.getUnindexedProperties())
        if key not in unindexed:
            self.setUnindexedProperties(unindexed + [key])
    self[key] = value
def __getitem__(self, key):
    currTime = datetime.now()
    if currTime > self.ctime + self.updateInterval:
        data = memcache.get(self.keyName)
        if data:
            # Loaded successfully from memcache
            self.data.update(data)
            self.ctime = currTime
        else:
            data = SharedConf.SharedConfData.get_by_key_name(self.keyName)
            if data:
                for k in data.dynamic_properties():
                    self.data[k] = getattr(data, k)
            else:
                # There isn't any config in the db nor the memcache
                data = SharedConf.SharedConfData(key_name=self.keyName)
                for k, v in self.data.items():
                    # Initialize the DB-Config
                    setattr(data, k, v)
                data.put()
            memcache.set(self.keyName, self.data, 60 * 60 * 24)
    return self.data[key]
def add_values():
    # [START add_values]
    # Add a value if it doesn't exist in the cache,
    # with a cache expiration of 1 hour.
    memcache.add(key="weather_USA_98105", value="raining", time=3600)

    # Set several values, overwriting any existing values for these keys.
    memcache.set_multi(
        {"USA_98115": "cloudy", "USA_94105": "foggy", "USA_94043": "sunny"},
        key_prefix="weather_", time=3600
    )

    # Atomically increment an integer value.
    memcache.set(key="counter", value=0)
    memcache.incr("counter")
    memcache.incr("counter")
    memcache.incr("counter")
    # [END add_values]
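A side note on the increments above: the set() that seeds "counter" is not atomic with the incr() calls, so a concurrent request could reset a counter mid-flight. memcache.incr() accepts an initial_value argument that seeds a missing key and increments it in a single server-side operation; a minimal alternative sketch:

from google.appengine.api import memcache

# On a miss the key is treated as initial_value (0) and then incremented,
# so no separate set() is needed.
memcache.incr("counter", initial_value=0)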
def CreateXsrfToken(action):
    """Generate a token to be passed with a form for XSRF protection.

    Args:
        action: action to restrict token to

    Returns:
        suitably random token which is only valid for ten minutes and, if
        the user is authenticated, is only valid for the user that
        generated it.
    """
    user_str = _MakeUserStr()
    token = base64.b64encode(
        ''.join(chr(int(random.random() * 255)) for _ in range(0, 64)))
    memcache.set(token,
                 (user_str, action),
                 time=XSRF_VALIDITY_TIME,
                 namespace=MEMCACHE_NAMESPACE)
    return token
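This example stores the (user, action) pair under the token itself, so checking a submitted token is a single memcache lookup and expiry invalidates it automatically. The matching checker does not appear on this page; a minimal sketch of what it could look like (ValidateXsrfToken is a hypothetical name, reusing _MakeUserStr and MEMCACHE_NAMESPACE from the example above):

def ValidateXsrfToken(token, action):
    # A cache hit proves the token was minted within XSRF_VALIDITY_TIME
    # for this user and this action.
    cached = memcache.get(token, namespace=MEMCACHE_NAMESPACE)
    return cached == (_MakeUserStr(), action)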
def _SetValue(self, key, type_, value):
    """Convert a string value and store the result in memcache.

    Args:
        key: String
        type_: String, describing what type the value should have in the
            cache.
        value: String, will be converted according to type_.

    Returns:
        Result of memcache.set(key, converted_value). True if value was set.

    Raises:
        ValueError: Value can't be converted according to type_.
    """
    for _, converter, typestr in self.TYPES:
        if typestr == type_:
            value = converter(value)
            break
    else:
        raise ValueError('Type %s not supported.' % type_)
    return memcache.set(key, value)
def get_access_token(force=False):
    """Tries to obtain access token from memcache and, if it fails,
    obtains a new set and stores in memcache. See
    https://dev.twitter.com/oauth/application-only.

    Deleting the memcache key `access_token` will trigger a token refresh.
    """
    token = memcache.get('access_token')
    if force or token is None:
        logging.warning('Needed to fetch access_token')
        encoded_key = urllib.quote_plus(CUSTOMER_KEY)
        encoded_secret = urllib.quote_plus(CUSTOMER_SECRET)
        encoded_credentials = base64.b64encode(
            "{}:{}".format(encoded_key, encoded_secret))
        response = urlfetch.fetch(
            'https://api.twitter.com/oauth2/token',
            payload='grant_type=client_credentials',
            method=urlfetch.POST,
            headers={'Authorization': 'Basic ' + encoded_credentials})
        if response.status_code == urlfetch.httplib.OK:
            response_data = json.loads(response.content)
            token = response_data['access_token']
            memcache.set('access_token', token, 2592000)  # 30 days
    return token
def add_user_message(self, kind, msg, detail='', time=15 * 60):
    """
    Add a message to the current user to memcache.
    """
    if self.facebook.uid:
        key = 'messages:%s' % self.facebook.uid
        self._messages = memcache.get(key)
        message = {
            'kind': kind,
            'message': msg,
            'detail': detail,
        }
        if self._messages is not None:
            self._messages.append(message)
        else:
            self._messages = [message]
        memcache.set(key, self._messages, time=time)
def get_request_token(user, callback):
    ''' Get request token '''
    from settings import secrets
    client = EvernoteClient(
        consumer_key=secrets.EVERNOTE_CONSUMER_KEY,
        consumer_secret=secrets.EVERNOTE_CONSUMER_SECRET,
        sandbox=SANDBOX
    )
    request_token = client.get_request_token(callback)
    logging.debug(request_token)
    # Save secret
    memcache.set(SECRET_MCK % user.key.id(),
                 request_token['oauth_token_secret'])
    authorize_url = client.get_authorize_url(request_token)
    return authorize_url
def _goals_request(self):
    [annual, monthly, longterm] = Goal.Current(self.user)
    speech = None
    g = None
    if monthly:
        g = monthly
        speech = "Goals for %s. " % datetime.strftime(g.date, "%B %Y")
    elif annual:
        g = annual
        speech = "Goals for %s. " % g.date.year
    if g:
        if g.text:
            for i, text in enumerate(g.text):
                speech += "%d: %s. " % (i + 1, text)
        else:
            speech = "No goals yet"
    else:
        speech = "You haven't set up any goals yet. " + GOAL.SET_INFO
    return speech
def set(self, url, content):
    try:
        memcache.set(url, content, time=int(self._max_age),
                     namespace=NAMESPACE)
    except Exception as e:
        logging.warning(e, exc_info=True)
def post(self):
    if pubsub_utils.SUBSCRIPTION_UNIQUE_TOKEN != self.request.get('token'):
        self.response.status = 404
        return
    # Store the message in the datastore.
    message = json.loads(urllib.unquote(self.request.body).rstrip('='))
    message_body = base64.b64decode(str(message['message']['data']))
    message = message_body.split(',')
    d = datetime.strptime(message[0][:-5], '%Y-%m-%dT%H:%M:%S')
    timestamp = time.mktime(d.timetuple())
    message = message[1:]
    entities = zip(message[::2], map(int, message[1::2]))
    data_raw = memcache.get(MC_OSCARS_TOP10)
    if data_raw:
        # Reuse the value already fetched rather than issuing a second
        # memcache round trip for the same key.
        data = json.loads(data_raw)
    else:
        data = None
    if data is None or data['timestamp'] < timestamp:
        memcache.set(MC_OSCARS_TOP10, json.dumps({
            'timestamp': timestamp,
            'entities': entities
        }))
def query_or_cache(entity, start, stop, force=False):
    if force:
        entities = TwitterEntityFreq.query(
            TwitterEntityFreq.entity == entity,
            TwitterEntityFreq.timestamp > start,
            TwitterEntityFreq.timestamp <= stop
        ).order(TwitterEntityFreq.timestamp)
        data = [MockTWE(i) for i in entities]
        memcache.set(MC_GUARD % entity, 1, 30)
        memcache.set(MC_KEY % entity, data)
        return [i for i in data]
    nq = memcache.get(MC_GUARD % entity)
    data = memcache.get(MC_KEY % entity)
    td = timedelta(seconds=30)
    if data:
        last = data[-1].timestamp
        if stop - last > td and not nq:
            entities = TwitterEntityFreq.query(
                TwitterEntityFreq.entity == entity,
                TwitterEntityFreq.timestamp > last,
                TwitterEntityFreq.timestamp <= stop
            ).order(TwitterEntityFreq.timestamp)
            data += [MockTWE(i) for i in entities]
            memcache.set(MC_GUARD % entity, 1, 30)
            memcache.set(MC_KEY % entity, data)
    elif not nq:
        entities = TwitterEntityFreq.query(
            TwitterEntityFreq.entity == entity,
            TwitterEntityFreq.timestamp > start,
            TwitterEntityFreq.timestamp <= stop
        ).order(TwitterEntityFreq.timestamp)
        data = [MockTWE(i) for i in entities]
        memcache.set(MC_GUARD % entity, 1, 30)
        memcache.set(MC_KEY % entity, data)
    # `data` can still be None here when the cache missed but another
    # request holds the guard key; treat that as an empty result.
    data = data or []
    return [i for i in data if i.timestamp > start and i.timestamp <= stop]
def run_on_appengine(gdata_service, store_tokens=True, single_user_mode=False,
                     deadline=None):
    """Modifies a GDataService object to allow it to run on App Engine.

    Args:
      gdata_service: An instance of AtomService, GDataService, or any of
          their subclasses which has an http_client member and a
          token_store member.
      store_tokens: Boolean, defaults to True. If True, the gdata_service
          will attempt to add each token to its token_store when
          SetClientLoginToken or SetAuthSubToken is called. If False, the
          tokens will not automatically be added to the token_store.
      single_user_mode: Boolean, defaults to False. If True, the
          current_token member of gdata_service will be set when
          SetClientLoginToken or SetAuthSubToken is called. If set to True,
          the current_token is set in the gdata_service and anyone who
          accesses the object will use the same token.

          Note: If store_tokens is set to False and single_user_mode is set
          to False, all tokens will be ignored, since the library assumes
          the tokens should not be stored in the datastore and they should
          not be stored in the gdata_service object. This will make it
          impossible to make requests which require authorization.
      deadline: int (optional) The number of seconds to wait for a response
          before timing out on the HTTP request. If no deadline is
          specified, the default deadline for HTTP requests from App Engine
          is used. The maximum is currently 10 (for 10 seconds). The
          default deadline for App Engine is 5 seconds.
    """
    gdata_service.http_client = AppEngineHttpClient(deadline=deadline)
    gdata_service.token_store = AppEngineTokenStore()
    gdata_service.auto_store_tokens = store_tokens
    gdata_service.auto_set_current_token = single_user_mode
    return gdata_service
def set_token(unique_key, token_str):
    """Saves the serialized auth token in the datastore.

    The token is also stored in memcache to speed up retrieval on a cache
    hit.

    Args:
      unique_key: The unique name for this token as a string. It is up to
          your code to ensure that this token value is unique in your
          application. Previous values will be silently overwritten.
      token_str: A serialized auth token as a string. I expect that this
          string will be generated by gdata.gauth.token_to_blob.

    Returns:
      True if the token was stored successfully, False if the token could
      not be safely cached (if an old value could not be cleared). If the
      token was set in memcache, but not in the datastore, this function
      will return None. However, in that situation an exception will
      likely be raised.

    Raises:
      Datastore exceptions may be raised from the App Engine SDK in the
      event of failure.
    """
    # First try to save in memcache.
    result = memcache.set(unique_key, token_str)
    # If memcache fails to save the value, clear the cached value.
    if not result:
        result = memcache.delete(unique_key)
        # If we could not clear the cached value for this token, refuse
        # to save.
        if result == 0:
            return False
    # Save to the datastore.
    if Token(key_name=unique_key, t=token_str).put():
        return True
    return None
def set(self, url, content):
    try:
        memcache.set(url, content, time=int(self._max_age),
                     namespace=NAMESPACE)
    except Exception as e:
        LOGGER.warning(e, exc_info=True)
def set(key=None, value=None, time=TIMEOUT):
    parts = _split_value(json.dumps(value))
    memcache.set(key=key + '_parts', value=len(parts), time=time)
    for i, part in enumerate(parts):
        logging.debug("Setting %s%d" % (key, i))
        memcache.set(key='%s%d' % (key, i), value=part, time=time)
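This setter splits a large JSON payload across several keys (a single memcache value is capped at roughly 1 MB). Reading it back requires reassembling the parts; the project's getter is not shown here, so the following is only a hypothetical companion sketch assuming the same key scheme, where any missing chunk counts as a full cache miss:

import json

from google.appengine.api import memcache

def get(key=None):
    # How many chunks were written for this logical key?
    num_parts = memcache.get(key=key + '_parts')
    if num_parts is None:
        return None
    part_keys = ['%s%d' % (key, i) for i in range(num_parts)]
    parts = memcache.get_multi(part_keys)
    if len(parts) != num_parts:
        # A chunk expired or was evicted; treat the whole value as missing.
        return None
    return json.loads(''.join(parts[k] for k in part_keys))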
def GetEntityViaMemcache(entity_key):
    """Get entity from memcache if available, from datastore if not."""
    entity = memcache.get(entity_key)
    if entity is not None:
        return entity
    key = ndb.Key(urlsafe=entity_key)
    entity = key.get()
    if entity is not None:
        memcache.set(entity_key, entity)
    return entity
def _put(self):
    """Saves the session and updates the memcache entry."""
    memcache.set(self._key.id(), self.data)
    super(Session, self).put()
def save_session(self, response):
    if self.session is None or not self.session.modified:
        return
    memcache.set(self.sid, dict(self.session))
    self.session_store.save_secure_cookie(
        response, self.name, {'_sid': self.sid}, **self.session_args)
def index():
    """Lists the coffeez"""
    coffees = memcache.get(ALL_COFFEES_KEY)
    if not coffees:
        coffees = Coffee.query(Coffee.active == True).fetch()
        # cannot store all images into memcached due to size limits
        for coffee in coffees:
            coffee.image = None
        memcache.set(ALL_COFFEES_KEY, coffees)
    roaster_query = memcache.get(ALL_ROASTERS_KEY)
    if not roaster_query:
        roaster_query = Coffee.query(projection=["roaster"],
                                     distinct=True).fetch()
        memcache.set(ALL_ROASTERS_KEY, roaster_query)
    roasters = [data.roaster for data in roaster_query]
    return render_template('index.html', coffees=coffees, roasters=roasters)
def get_coffee_image(coffee_id):
    """Gets the image attached to the coffee"""
    coffee_int_id = int(coffee_id)
    coffee = memcache.get("coffee_image_{}".format(coffee_int_id))
    if not coffee:
        coffee = Coffee.get_by_id(coffee_int_id)
        memcache.set("coffee_image_{}".format(coffee_int_id), coffee)
    if coffee:
        if coffee.image:
            return send_file(io.BytesIO(coffee.image))
    return app.send_static_file('coffee.png')
def save(self, sync_only=False):
    # todo: implement sync only
    self._record = PicklableSession(
        self._expires, self._last_accessed, self._data)
    memcache.set(self._sid, pickle.dumps(self._record), namespace=NAMESPACE)
def get_sharded_config(cls, name):
    cache_key = ShardedCounterConfig.cache_key(name)
    config = memcache.get(cache_key)
    if not config:
        # Try fetching from the datastore.
        config = ShardedCounterConfig.get_or_insert(name, name=name,
                                                    shards=20)
        memcache.set(cache_key, config, time=86400)
    return config
def get_count(cls, name):
    cache_key = CrashReport.count_cache_key(name)
    total = memcache.get(cache_key)
    if total is None:
        total = 0
        q = CrashReport.all()
        q.filter('name = ', name)
        for entity in q.run():
            total += entity.count
        memcache.set(cache_key, str(total))
    return int(total)
def jinja2_environment(cls):
    # set the template search path to pages
    if not RRequest.environment:
        RRequest.environment = jinja2.Environment(
            loader=jinja2.FileSystemLoader(
                os.path.join(os.path.dirname(__file__), 'pages')))
        # add readable date filter to make it available for templates
        RRequest.environment.filters['readable_date'] = readable_date
        RRequest.environment.filters['crash_uri'] = crash_uri
        RRequest.environment.filters['snippetize'] = snippetize
        RRequest.environment.filters['issue_url'] = issue_url
    return RRequest.environment
def getUser_MakerSecret(id):
    secret = memcache.get(key="MakerSecret-" + id)
    if secret is None:
        try:
            key = Key.from_path('UserData', id)
            entity = datastore.Get(key)
            secret = entity['maker_secret']
            memcache.set(key="MakerSecret-" + id, value=secret)
        except:
            logging.debug(id + u"?IFTTT Maker Secret???????????")
    return secret
def setUser_MakerSecret(id, maker_secret):
    key = Key.from_path('UserData', id)
    entity = datastore.Get(key)
    entity.update({
        'maker_secret': maker_secret
    })
    datastore.Put(entity)
    memcache.set(key="MakerSecret-" + id, value=maker_secret)
def setUserByDashIdWithCurrentUser(dashId):
    # if not exists, assign the dashId to lineId
    result = False
    currentUser = _getCurrentUser()
    if currentUser is not None:
        # memcache.set(key="Dash-user-" + dashid, value=currentUser, time=86400)
        # memcache.set(key="User-dash-" + currentUser, value=dashid, time=86400)
        logging.debug(dashId + u"?DashButton???????????")
        try:
            lineId = currentUser
            key = db.Key.from_path('UserData', lineId)
            userData = db.get(key)
            userData.dashId = dashId
            userData.put()
            found_lineId = lineId
            _clearCurrentUser()
            send2Line.sendText(currentUser, "DashButton????????")
            result = True
        except:
            logging.warning(u"currentUser???????" + lineId + u"???????????")
    else:
        logging.warning(u"currentUser??????????????????????")
    return result
def __init(self):
    # date object storing date for
    self.date = None
    self.heading = None
    self.verse_reference = None
    self.bible_in_a_year = None
    self.verse_concise = None
    self.verse_full = None
    self.post = None
    # write get and set
    # link to get full verse
    self.link_to_full_verse_bgw = None
    self.link_to_full_verse_yv = None
def save(cls, media):
    recent_owner_ids = (memcache.get(cls.KEY_PRE_OWNER_IDS) or '').split()
    recent_codes = cls._get_recent_codes()
    codes_set = set(recent_codes)
    owner_ids_set = set(recent_owner_ids)
    new_media = []
    for item in media:
        if not TagValidator.is_valid_tags(item['tags']):
            continue
        if item['code'] in codes_set:
            continue
        if item['owner']['id'] in owner_ids_set:
            continue
        new_media.append(item)
        owner_ids_set.add(item['owner']['id'])
        codes_set.add(item['code'])
    if not new_media:
        return 0
    tag_text = cls()
    tag_text.text = '\n'.join(map(cls._to_line, new_media))
    tag_text.put()
    recent_codes.extend([x['code'] for x in media])
    recent_owner_ids.extend([x['owner']['id'] for x in new_media])
    cls._set_cache(recent_codes, cls.MAX_CODES_COUNT, cls.KEY_RECENT_CODES)
    cls._set_cache(recent_owner_ids, cls.MAX_OWNER_COUNT,
                   cls.KEY_PRE_OWNER_IDS)
    tags = reduce(lambda y, x: y + x['tags'], new_media, [])
    tags = set(tags)
    cls._set_last_tags(tags)
    return len(new_media)
def _set_last_tags(cls, tags):
    memcache.set('tag_text_last_tags', ' '.join(tags))