We extracted the following 50 code examples from open-source Python projects to illustrate how to use google.appengine.api.memcache.get().
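Before the extracted examples, here is a minimal sketch of the read-through caching pattern most of them follow: check memcache first, fall back to a slower source on a miss, then repopulate the cache. The Profile model, the key scheme, and the 600-second expiry are hypothetical placeholders for illustration, not taken from any of the projects below.

from google.appengine.api import memcache
from google.appengine.ext import ndb


class Profile(ndb.Model):
    # Hypothetical model used only for this sketch.
    display_name = ndb.StringProperty()


def get_profile(user_id):
    """Read-through cache sketch: memcache first, datastore on a miss."""
    cache_key = 'profile:%s' % user_id  # hypothetical key scheme
    profile = memcache.get(cache_key)
    if profile is None:
        profile = Profile.get_by_id(user_id)
        if profile is not None:
            # The 600-second expiry is arbitrary for this sketch.
            memcache.set(cache_key, profile, time=600)
    return profile
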
def xsrf_secret_key():
    """Return the secret key for use for XSRF protection.

    If the Site entity does not have a secret key, this method will also
    create one and persist it.

    Returns:
        The secret key.
    """
    secret = memcache.get(XSRF_MEMCACHE_ID, namespace=OAUTH2CLIENT_NAMESPACE)
    if not secret:
        # Load the one and only instance of SiteXsrfSecretKey.
        model = SiteXsrfSecretKey.get_or_insert(key_name='site')
        if not model.secret:
            model.secret = _generate_new_xsrf_secret_key()
            model.put()
        secret = model.secret
        memcache.add(XSRF_MEMCACHE_ID, secret,
                     namespace=OAUTH2CLIENT_NAMESPACE)
    return str(secret)

def __init__(self, scope, **kwargs):
    """Constructor for AppAssertionCredentials

    Args:
        scope: string or iterable of strings, scope(s) of the credentials
               being requested.
        **kwargs: optional keyword args, including:
            service_account_id: service account id of the application. If None
                                or unspecified, the default service account
                                for the app is used.
    """
    self.scope = util.scopes_to_string(scope)
    self._kwargs = kwargs
    self.service_account_id = kwargs.get('service_account_id', None)

    # Assertion type is no longer used, but still in the
    # parent class signature.
    super(AppAssertionCredentials, self).__init__(None)

def locked_get(self):
    """Retrieve Credential from datastore.

    Returns:
        oauth2client.Credentials
    """
    credentials = None
    if self._cache:
        json = self._cache.get(self._key_name)
        if json:
            credentials = Credentials.new_from_json(json)
    if credentials is None:
        entity = self._get_entity()
        if entity is not None:
            credentials = getattr(entity, self._property_name)
            if self._cache:
                self._cache.set(self._key_name, credentials.to_json())
    if credentials and hasattr(credentials, 'set_store'):
        credentials.set_store(self)
    return credentials

def oauth2decorator_from_clientsecrets(filename, scope,
                                       message=None, cache=None):
    """Creates an OAuth2Decorator populated from a clientsecrets file.

    Args:
        filename: string, File name of client secrets.
        scope: string or list of strings, scope(s) of the credentials being
               requested.
        message: string, A friendly string to display to the user if the
                 clientsecrets file is missing or invalid. The message may
                 contain HTML and will be presented on the web interface for
                 any method that uses the decorator.
        cache: An optional cache service client that implements get() and
               set() methods. See clientsecrets.loadfile() for details.

    Returns:
        An OAuth2Decorator
    """
    return OAuth2DecoratorFromClientSecrets(filename, scope,
                                            message=message, cache=cache)

def __init__(self, request=None, response=None):
    """Calls the constructor of the super and does the local setup."""
    super(InitHandler, self).__init__(request, response)
    self.client = pubsub_utils.get_client()
    # self._setup_topic()
    self._setup_subscription()

# def _setup_topic(self):
#     """Creates a topic if it does not exist."""
#     topic_name = pubsub_utils.get_full_topic_name()
#     try:
#         self.client.projects().topics().get(
#             topic=topic_name).execute()
#     except errors.HttpError as e:
#         if e.resp.status == 404:
#             self.client.projects().topics().create(
#                 name=topic_name, body={}).execute()
#         else:
#             logging.exception(e)
#             raise

def _setup_subscription(self):
    """Creates a subscription if it does not exist."""
    subscription_name = pubsub_utils.get_full_subscription_name()
    try:
        self.client.projects().subscriptions().get(
            subscription=subscription_name).execute()
    except errors.HttpError as e:
        if e.resp.status == 404:
            body = {
                'topic': pubsub_utils.get_full_topic_name(),
                'pushConfig': {
                    'pushEndpoint': pubsub_utils.get_app_endpoint_url()
                }
            }
            self.client.projects().subscriptions().create(
                name=subscription_name, body=body).execute()
        else:
            logging.exception(e)
            raise

def save_auth_tokens(token_dict, user=None):
    """Associates the tokens with the current user and writes to the datastore.

    If there is no current user, the tokens are not written and this function
    returns None.

    Returns:
        The key of the datastore entity containing the user's tokens, or None
        if there was no current user.
    """
    if user is None:
        user = users.get_current_user()
    if user is None:
        return None
    memcache.set('gdata_pickled_tokens:%s' % user, pickle.dumps(token_dict))
    user_tokens = TokenCollection.all().filter('user =', user).get()
    if user_tokens:
        user_tokens.pickled_tokens = pickle.dumps(token_dict)
        return user_tokens.put()
    else:
        user_tokens = TokenCollection(
            user=user,
            pickled_tokens=pickle.dumps(token_dict))
        return user_tokens.put()

def load_auth_tokens(user=None):
    """Reads a dictionary of the current user's tokens from the datastore.

    If there is no current user (a user is not signed in to the app) or the
    user does not have any tokens, an empty dictionary is returned.
    """
    if user is None:
        user = users.get_current_user()
    if user is None:
        return {}
    pickled_tokens = memcache.get('gdata_pickled_tokens:%s' % user)
    if pickled_tokens:
        return pickle.loads(pickled_tokens)
    user_tokens = TokenCollection.all().filter('user =', user).get()
    if user_tokens:
        memcache.set('gdata_pickled_tokens:%s' % user,
                     user_tokens.pickled_tokens)
        return pickle.loads(user_tokens.pickled_tokens)
    return {}

def get_token(unique_key):
    """Searches for a stored token with the desired key.

    Checks memcache and then the datastore if required.

    Args:
        unique_key: str which uniquely identifies the desired auth token.

    Returns:
        A string encoding the auth token data. Use gdata.gauth.token_from_blob
        to convert back into a usable token object. None if the token was not
        found in memcache or the datastore.
    """
    token_string = memcache.get(unique_key)
    if token_string is None:
        # The token wasn't in memcache, so look in the datastore.
        token = Token.get_by_key_name(unique_key)
        if token is None:
            return None
        return token.t
    return token_string

def __init__(self, scope, **kwargs):
    """Constructor for AppAssertionCredentials

    Args:
        scope: string or iterable of strings, scope(s) of the credentials
               being requested.
        **kwargs: optional keyword args, including:
            service_account_id: service account id of the application. If None
                                or unspecified, the default service account
                                for the app is used.
    """
    self.scope = _helpers.scopes_to_string(scope)
    self._kwargs = kwargs
    self.service_account_id = kwargs.get('service_account_id', None)
    self._service_account_email = None

    # Assertion type is no longer used, but still in the
    # parent class signature.
    super(AppAssertionCredentials, self).__init__(None)

def locked_get(self):
    """Retrieve Credential from datastore.

    Returns:
        oauth2client.Credentials
    """
    credentials = None
    if self._cache:
        json = self._cache.get(self._key_name)
        if json:
            credentials = client.Credentials.new_from_json(json)
    if credentials is None:
        entity = self._get_entity()
        if entity is not None:
            credentials = getattr(entity, self._property_name)
            if self._cache:
                self._cache.set(self._key_name, credentials.to_json())
    if credentials and hasattr(credentials, 'set_store'):
        credentials.set_store(self)
    return credentials

def get_last(cls, domain, days=30):
    """
    Gets the entries for the specified number of days. This will first
    query memcached, using datastore as a fallback.
    """
    if days > cls._memcache_date_offset:
        return cls._get_from_datastore(domain, days)
    cached = memcache.get(
        key=cls._memcache_key, namespace='{}|'.format(domain))
    if cached:
        records = json.loads(cached)
        return records
    return cls._update_memcached(domain)

def update(cls, date, domain, new, old=None):
    """
    Updates a timeline record for a given day. It also attempts to update
    the data (if any) in memcached to prevent a full cache reload.
    """
    record = cls.query(cls.date == date).get()
    if not record:
        print 'no record found.'
        record = StatusTimeline(domain=domain, date=date, statuses={})
        record.put()
    if old:
        print 'Removing 1 from {}'.format(old)
        record.statuses[old] -= 1
    print 'Updating entry on {} to {} with new value {}'.format(
        date, new, record.statuses[new])
    record.statuses[new] += 1
    record.put()

    # Temporary update to memcached since the data is eventually
    # consistent
    cls._update_memcached(domain, time=5)

def get(self, status):
    """
    Gets the count of records for a given status. This function will fall
    back to query the database if no memcached result is returned.

    Args:
        status - str - The status to query for
    """
    count = memcache.get(key=status, namespace=self._namespace)
    if count is not None:
        return count
    # Update the memcache store if we hit the database
    Stats.update(self._domain)
    return EmailReport.query(
        EmailReport.reported_domain == self._domain,
        EmailReport.status == status).count()

def locked_get(self):
    """Retrieve Credential from datastore.

    Returns:
        oauth2client.Credentials
    """
    if self._cache:
        json = self._cache.get(self._key_name)
        if json:
            return Credentials.new_from_json(json)

    credential = None
    entity = self._model.get_by_key_name(self._key_name)
    if entity is not None:
        credential = getattr(entity, self._property_name)
        if credential and hasattr(credential, 'set_store'):
            credential.set_store(self)
        if self._cache:
            self._cache.set(self._key_name, credential.to_json())
    return credential

def get_by_sid(cls, sid):
    """Returns a ``Session`` instance by session id.

    :param sid:
        A session id.
    :returns:
        An existing ``Session`` entity.
    """
    data = memcache.get(sid)
    if not data:
        session = model.Key(cls, sid).get()
        if session:
            data = session.data
            memcache.set(sid, data)
    return data

def open_resource(self, name):
    """Opens a resource from the zoneinfo subdir for reading."""
    name_parts = name.lstrip('/').split('/')
    if os.path.pardir in name_parts:
        raise ValueError('Bad path segment: %r' % os.path.pardir)
    cache_key = 'pytz.zoneinfo.%s.%s' % (pytz.OLSON_VERSION, name)
    zonedata = memcache.get(cache_key)
    if zonedata is None:
        zonedata = get_zoneinfo().read('zoneinfo/' + '/'.join(name_parts))
        memcache.add(cache_key, zonedata)
        logging.info('Added timezone to memcache: %s' % cache_key)
    else:
        logging.info('Loaded timezone from memcache: %s' % cache_key)
    return StringIO(zonedata)

def test_get_and_put_set_store_on_cache_retrieval(self):
    storage = appengine.StorageByKeyName(
        appengine.CredentialsModel, 'foo', 'credentials', cache=memcache)

    self.assertEqual(None, storage.get())
    self.credentials.set_store(storage)
    storage.put(self.credentials)
    # Pre-bug 292 old_creds wouldn't have storage, and the _refresh
    # wouldn't be able to store the updated cred back into the storage.
    old_creds = storage.get()
    self.assertEqual(old_creds.access_token, 'foo')
    old_creds.invalid = True
    http = http_mock.HttpMock(data=BASIC_RESP)
    old_creds._refresh(http)
    new_creds = storage.get()
    self.assertEqual(new_creds.access_token, BASIC_TOKEN)

    # Verify mock.
    self._verify_basic_refresh(http)

def test_get_and_put_ndb(self):
    # Start empty
    storage = appengine.StorageByKeyName(
        appengine.CredentialsNDBModel, 'foo', 'credentials')
    self.assertEqual(None, storage.get())

    # Refresh storage and retrieve without using storage
    self.credentials.set_store(storage)
    http = http_mock.HttpMock(data=BASIC_RESP)
    self.credentials._refresh(http)
    credmodel = appengine.CredentialsNDBModel.get_by_id('foo')
    self.assertEqual(BASIC_TOKEN, credmodel.credentials.access_token)
    self.assertEqual(credmodel.credentials.to_json(),
                     self.credentials.to_json())

    # Verify mock.
    self._verify_basic_refresh(http)

def test_delete_ndb(self):
    # Start empty
    storage = appengine.StorageByKeyName(
        appengine.CredentialsNDBModel, 'foo', 'credentials')
    self.assertEqual(None, storage.get())

    # Add credentials to model with storage, and check equivalent
    # w/o storage
    storage.put(self.credentials)
    credmodel = appengine.CredentialsNDBModel.get_by_id('foo')
    self.assertEqual(credmodel.credentials.to_json(),
                     self.credentials.to_json())

    # Delete and make sure empty
    storage.delete()
    self.assertEqual(None, storage.get())

def test_get_and_put_mixed_db_storage_ndb_get(self):
    # Start empty
    storage = appengine.StorageByKeyName(
        appengine.CredentialsModel, 'foo', 'credentials')
    self.assertEqual(None, storage.get())

    # Set DB store and refresh to add to storage
    self.credentials.set_store(storage)
    http = http_mock.HttpMock(data=BASIC_RESP)
    self.credentials._refresh(http)

    # Retrieve same key from NDB model to confirm mixing works
    credmodel = appengine.CredentialsNDBModel.get_by_id('foo')
    self.assertEqual(BASIC_TOKEN, credmodel.credentials.access_token)
    self.assertEqual(self.credentials.to_json(),
                     credmodel.credentials.to_json())

    # Verify mock.
    self._verify_basic_refresh(http)

def test_delete_db_ndb_mixed(self):
    # Start empty
    storage_ndb = appengine.StorageByKeyName(
        appengine.CredentialsNDBModel, 'foo', 'credentials')
    storage = appengine.StorageByKeyName(
        appengine.CredentialsModel, 'foo', 'credentials')

    # First DB, then NDB
    self.assertEqual(None, storage.get())
    storage.put(self.credentials)
    self.assertNotEqual(None, storage.get())
    storage_ndb.delete()
    self.assertEqual(None, storage.get())

    # First NDB, then DB
    self.assertEqual(None, storage_ndb.get())
    storage_ndb.put(self.credentials)
    storage.delete()
    self.assertNotEqual(None, storage_ndb.get())
    # NDB uses memcache and an instance cache (Context)
    ndb.get_context().clear_cache()
    memcache.flush_all()
    self.assertEqual(None, storage_ndb.get())

def test_kwargs_are_passed_to_underlying_flow(self):
    decorator = appengine.OAuth2Decorator(
        client_id='foo_client_id', client_secret='foo_client_secret',
        user_agent='foo_user_agent', scope=['foo_scope', 'bar_scope'],
        access_type='offline', prompt='consent',
        revoke_uri='dummy_revoke_uri')
    request_handler = MockRequestHandler()
    decorator._create_flow(request_handler)

    self.assertEqual('https://example.org/oauth2callback',
                     decorator.flow.redirect_uri)
    self.assertEqual('offline', decorator.flow.params['access_type'])
    self.assertEqual('consent', decorator.flow.params['prompt'])
    self.assertEqual('foo_user_agent', decorator.flow.user_agent)
    self.assertEqual('dummy_revoke_uri', decorator.flow.revoke_uri)
    self.assertEqual(None, decorator.flow.params.get('user_agent', None))
    self.assertEqual(decorator.flow, decorator._tls.flow)

def __init__(self, scope, **kwargs):
    """Constructor for AppAssertionCredentials

    Args:
        scope: string or iterable of strings, scope(s) of the credentials
               being requested.
        **kwargs: optional keyword args, including:
            service_account_id: service account id of the application. If None
                                or unspecified, the default service account
                                for the app is used.
    """
    self.scope = util.scopes_to_string(scope)
    self._kwargs = kwargs
    self.service_account_id = kwargs.get('service_account_id', None)

    # Assertion type is no longer used, but still in the parent class
    # signature.
    super(AppAssertionCredentials, self).__init__(None)

def load(self):
    if not self._loaded:
        found_in_cache = memcache.get(self._sid, namespace=NAMESPACE)
        if found_in_cache is None:
            return False
        else:
            self._record = pickle.loads(found_in_cache)
            self._loaded = True
            self._expires = self._record.expires
            self._last_accessed = self._record.last_accessed
            self._data = self._record.data
    return True

def new_crash_with_backoff(cls, crash_report):
    """
    There is a chance that we get a new crash before an issue has been
    submitted for it.
    """
    backoff_cache_key = cls.backoff_crash_key_new_crash(crash_report)
    backoff_value = memcache.get(backoff_cache_key)
    if not backoff_value:
        # A task does not exist. Queue a job.
        memcache.set(backoff_cache_key, "in_progress")
        deferred.defer(
            GithubOrchestrator.create_issue_job,
            crash_report.fingerprint,
            _queue=GithubOrchestrator.__QUEUE__)
        logging.info(
            'Enqueued job for new issue on GitHub for fingerprint {0}'.format(
                crash_report.fingerprint))
    else:
        # task already in progress, backoff
        logging.info(
            'A GitHub task is already in progress. Waiting for the dust to '
            'settle for fingerprint {0}'.format(crash_report.fingerprint))

def new_comment_with_backoff(cls, crash_report):
    """
    There is a chance that this is a hot issue and that there are too many
    crashes coming in. Try to use backoff when posting a new comment.
    """
    backoff_cache_key = cls.backoff_crash_key_new_comment(crash_report)
    backoff_value = memcache.get(backoff_cache_key)
    if not backoff_value:
        # A task does not exist. Queue a job.
        memcache.set(backoff_cache_key, "in_progress")
        deferred.defer(
            GithubOrchestrator.add_comment_job,
            crash_report.fingerprint,
            _queue=GithubOrchestrator.__QUEUE__)
        logging.info(
            'Enqueued job for new comment on GitHub for fingerprint {0}'.format(
                crash_report.fingerprint))
    else:
        # task already in progress, backoff
        logging.info(
            'A GitHub task is already in progress. Waiting for the dust to '
            'settle for fingerprint {0}'.format(crash_report.fingerprint))

def __init__(self):
    if is_appengine_local():
        secrets = DEBUG_CLIENT_SECRETS
    else:
        secrets = CLIENT_SECRETS

    with open(secrets, 'r') as contents:
        secrets = json.loads(contents.read())
        github_token = secrets.get(TOKEN_KEY)
        self.webhook_secret = secrets.get(WEBHOOK_SECRET)

    if is_appengine_local():
        self.reporter_host = DEBUG_CRASH_REPORTER_HOST
        self.repo_name = '{0}/{1}'.format(DEBUG_OWNER, DEBUG_REPO)
    else:
        self.reporter_host = CRASH_REPORTER_HOST
        self.repo_name = '{0}/{1}'.format(OWNER, REPO)

    self.github_client = Github(login_or_token=github_token)

def create_comment(self, crash_report):
    """
    Updates a crash report with the comment.
    """
    count = CrashReport.get_count(crash_report.name)
    issue_number = int(crash_report.issue)
    comment_body = self.issue_comment(count)

    # get repo
    repository = self.github_client.get_repo(self.repo_name)
    issue = repository.get_issue(issue_number)

    # create comment
    comment = issue.create_comment(comment_body)
    return {
        'issue': issue,
        'comment': comment
    }

def _most_recent_property(
        cls, name, property_name, default_value=None,
        serialize=lambda x: x, deserialize=lambda x: x, ttl=120):
    cache_key = CrashReport.recent_crash_property_key(name, property_name)
    most_recent_value = memcache.get(cache_key)
    if most_recent_value is None:
        most_recent = 0
        most_recent_value = default_value
        q = CrashReport.all()
        q.filter('name = ', name)
        for entity in q.run():
            in_millis = to_milliseconds(entity.date_time)
            if most_recent <= in_millis:
                most_recent = in_millis
                most_recent_value = serialize(
                    entity.__getattribute__(property_name))
        memcache.set(cache_key, most_recent_value, ttl)
    to_return = deserialize(most_recent_value)
    return to_return