The following 49 code examples, extracted from open-source Python projects, illustrate how to use sqlalchemy.orm.scoped_session().
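Before the individual project examples, here is a minimal, self-contained sketch of the pattern most of them share: build an engine, wrap a sessionmaker in scoped_session, use the thread-local session, and call remove() when the unit of work is done. The in-memory SQLite URL and the User model are illustrative placeholders, not taken from any of the projects below.

# Minimal sketch of the common scoped_session pattern (hypothetical model and DB URL).
from sqlalchemy import Column, Integer, String, create_engine
from sqlalchemy.orm import scoped_session, sessionmaker
from sqlalchemy.ext.declarative import declarative_base

Base = declarative_base()


class User(Base):
    __tablename__ = 'users'
    id = Column(Integer, primary_key=True)
    name = Column(String(50))


engine = create_engine('sqlite://')  # placeholder in-memory database
Base.metadata.create_all(engine)

# scoped_session gives each thread its own Session via a registry.
Session = scoped_session(sessionmaker(bind=engine))

session = Session()        # repeated calls in the same thread return the same session
session.add(User(name='example'))
session.commit()
Session.remove()           # discard the thread-local session when done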
def get(self):
    from sqlalchemy import create_engine
    from sqlalchemy.orm import sessionmaker, scoped_session
    engine = create_engine(constant.DB_ENGINE)
    # session_factory = sessionmaker(bind=engine)
    # DB_Session = scoped_session(session_factory)
    # DB_Session = session_factory
    # db_session = DB_Session()
    DBSession = sessionmaker(bind=engine)
    db_session = DBSession()
    query = Query(origin_lng='123', origin_lat='123',
                  destination_lng='123', destination_lat='123',
                  created_at=datetime.date.today())
    db_session.add(query)
    db_session.commit()
    # count = self.session.query(Query).count()
    # self.write('{} queries so far!'.format(1))
def check_and_clear():
    Session = scoped_session(sessionmaker(bind=dbengine))
    dbsession = Session()
    log.info("db check_and_clear started with options {} {}".format(MAX_INST, DBTTL))
    oldtime = datetime.datetime.today() - timedelta.relativedelta(days=DBTTL)
    log.info("looking for unregistered records older than {}".format(oldtime))
    inst = dbsession.query(Instance).filter(
        Instance.state != 'added',
        Instance.created_at <= oldtime
    ).delete()
    try:
        dbsession.commit()
        # Query.delete() returns the number of deleted rows, not a collection
        log.info("{} records removed from db".format(inst))
    except Exception as e:
        log.error(repr(e))
        dbsession.rollback()
    dbsession.bind.dispose()
    Session.remove()
def create_instance(iname, ip, uuid):
    Session = scoped_session(sessionmaker(bind=dbengine))
    dbsession = Session()
    inst = dbsession.query(Instance).filter(
        Instance.name == iname,
        Instance.state != 'added').first()
    if not inst:
        log.warning("instance {} was not found in db".format(iname))
        new_inst = Instance(iname, uuid, ip)
        dbsession.add(new_inst)
        new_inst.state = 'added'
        new_inst.dns_domain = DNS_CONF["domain"]
    else:
        inst.state = 'added'
        inst.dns_domain = DNS_CONF["domain"]
        inst.uuid = uuid
        inst.ip = ip
        inst.updated_at = datetime.datetime.now()
    try:
        dbsession.commit()
    except Exception as e:
        log.error(repr(e))
        dbsession.rollback()
    dbsession.bind.dispose()
    Session.remove()
def delete_instance(iname, uuid):
    Session = scoped_session(sessionmaker(bind=dbengine))
    dbsession = Session()
    inst = dbsession.query(Instance).filter(Instance.name == iname).first()
    if not inst:
        log.warning("instance {} was not found in db".format(iname))
    else:
        inst.state = 'deleted'
        inst.deleted_at = datetime.datetime.now()
    try:
        dbsession.commit()
    except Exception as e:
        log.error(repr(e))
        dbsession.rollback()
    dbsession.bind.dispose()
    Session.remove()
def __init__(self, dbfile=DB_FILE, autocommit=False, dictrows=True, **kwargs):
    self.dbfile = dbfile
    self.autocommit = autocommit
    self.dictrows = dictrows
    self.path = "sqlite:///{0}".format(self.dbfile)

    echo = False
    if TRACE:
        echo = True

    # http://docs.sqlalchemy.org/en/latest/orm/contextual.html
    self.engine = create_engine(self.path, echo=echo)
    self.handle = sessionmaker(bind=self.engine)
    self.handle = scoped_session(self.handle)
    self._session = None
    self._tx_count = 0

    Base.metadata.create_all(self.engine)

    logger.debug('database path: {}'.format(self.path))

    self.clear_memcache()
def connect(self):
    self.build_connection_string()
    if self.m_connection_str == None:
        self.lg("Not connecting to this database", 0)
        return None

    self.lg("Connecting to databases(" + str(self.m_connection_str) + ") Autocommit("
            + str(self.m_autocommit) + ") Autoflush(" + str(self.m_autoflush) + ")", 7)

    Base = declarative_base()
    self.m_engine = create_engine(self.m_connection_str, echo=False)
    self.m_connection = self.m_engine.connect()
    self.m_session = scoped_session(sessionmaker(autocommit=self.m_autocommit,
                                                 autoflush=self.m_autoflush,
                                                 bind=self.m_engine))

    self.lg("Connected to DB(" + str(self.m_name) + ") DBTables(" + str(self.m_database_name) + ")", 7)

    return None
# end of connect
def init_db(app):
    """
    Initialize the database.

    :param app:
    :return:
    """
    database_config = app.config.get('DATABASE')
    engine = create_engine(database_config, convert_unicode=True, echo=False)
    db_session = scoped_session(sessionmaker(autocommit=False,
                                             autoflush=False,
                                             bind=engine))
    Base.db_session = db_session
    Base.query = db_session.query_property()
    Base.metadata.create_all(bind=engine)
def create_session(db_string, drop_tables=False):
    """
    Creates a new DB session using the scoped_session that SQLAlchemy provides.

    :param db_string: The connection string.
    :type db_string: str
    :param drop_tables: Drop existing tables?
    :type drop_tables: bool
    :return: A SQLAlchemy session object
    :rtype: sqlalchemy.orm.scoped_session
    """
    global db_engine, Base

    db_engine = create_engine(db_string, convert_unicode=True)
    db_session = scoped_session(sessionmaker(bind=db_engine))
    Base.query = db_session.query_property()

    if drop_tables:
        Base.metadata.drop_all(bind=db_engine)

    Base.metadata.create_all(bind=db_engine)

    return db_session
def __init__(self, app, dburi, search):
    self.log = logging.getLogger('boartty.db')
    self.dburi = dburi
    self.search = search
    self.engine = create_engine(self.dburi)
    metadata.create_all(self.engine)
    self.migrate(app)
    # If we want the objects returned from query() to be usable
    # outside of the session, we need to expunge them from the session,
    # and since the DatabaseSession always calls commit() on the session
    # when the context manager exits, we need to inform the session to
    # expire objects when it does so.
    self.session_factory = sessionmaker(bind=self.engine,
                                        expire_on_commit=False,
                                        autoflush=False)
    self.session = scoped_session(self.session_factory)
    self.lock = threading.Lock()
def setup_class(cls):
    if not config.get('ckan.datastore.read_url'):
        raise nose.SkipTest('Datastore runs on legacy mode, skipping...')
    engine = db._get_engine(
        {'connection_url': config['ckan.datastore.write_url']}
    )
    cls.Session = orm.scoped_session(orm.sessionmaker(bind=engine))

    datastore_test_helpers.clear_db(cls.Session)

    create_tables = [
        u'CREATE TABLE test_a (id_a text)',
        u'CREATE TABLE test_b (id_b text)',
        u'CREATE TABLE "TEST_C" (id_c text)',
        u'CREATE TABLE test_d ("?/?" integer)',
    ]
    for create_table_sql in create_tables:
        cls.Session.execute(create_table_sql)
def setup_class(cls):
    if not tests.is_datastore_supported():
        raise nose.SkipTest("Datastore not supported")
    p.load('datastore')
    ctd.CreateTestData.create()
    cls.sysadmin_user = model.User.get('testsysadmin')
    cls.normal_user = model.User.get('annafan')
    resource = model.Package.get('annakarenina').resources[0]
    cls.data = {
        'resource_id': resource.id,
        'aliases': u'b\xfck2',
        'fields': [{'id': 'book', 'type': 'text'},
                   {'id': 'author', 'type': 'text'},
                   {'id': 'rating with %', 'type': 'text'}],
        'records': [{'book': 'annakarenina', 'author': 'tolstoy',
                     'rating with %': '90%'},
                    {'book': 'warandpeace', 'author': 'tolstoy',
                     'rating with %': '42%'}]
    }
    engine = db._get_engine(
        {'connection_url': config['ckan.datastore.write_url']})
    cls.Session = orm.scoped_session(orm.sessionmaker(bind=engine))

    set_url_type(
        model.Package.get('annakarenina').resources, cls.sysadmin_user)
def setup_class(cls):
    wsgiapp = middleware.make_app(config['global_conf'], **config)
    cls.app = paste.fixture.TestApp(wsgiapp)
    if not tests.is_datastore_supported():
        raise nose.SkipTest("Datastore not supported")
    p.load('datastore')
    p.load('datapusher')
    p.load('test_datapusher_plugin')

    resource = factories.Resource(url_type='datastore')
    cls.dataset = factories.Dataset(resources=[resource])

    cls.sysadmin_user = factories.User(name='testsysadmin', sysadmin=True)
    cls.normal_user = factories.User(name='annafan')
    engine = db._get_engine(
        {'connection_url': config['ckan.datastore.write_url']})
    cls.Session = orm.scoped_session(orm.sessionmaker(bind=engine))
def session(*args, **kwargs):
    """
    Returns:
        Session: A new SQLAlchemy :class:`Session`.

    Boilerplate method to create a database session. Pass in the same
    parameters as you'd pass to create_engine. Internally, this uses
    SQLAlchemy's `scoped_session` session constructors, which means that
    calling it again with the same parameters will reuse the `scoped_session`.

    :class:`S <S>` creates a session in the same way but in the form of a
    context manager.
    """
    Session = get_scoped_session_maker(*args, **kwargs)
    return Session()
def make_session(engines, force_scope=False, info=None):
    if force_scope:
        scopefunc = scope_func
    else:
        scopefunc = None

    session = scoped_session(
        sessionmaker(
            class_=RoutingSession,
            expire_on_commit=False,
            engines=engines,
            info=info or {"name": uuid.uuid4().hex},
        ),
        scopefunc=scopefunc
    )
    return session
def setup_class(cls):
    if not pylons.config.get('ckan.datastore.read_url'):
        raise nose.SkipTest('Datastore runs on legacy mode, skipping...')
    engine = db._get_engine(
        {'connection_url': pylons.config['ckan.datastore.write_url']}
    )
    cls.Session = orm.scoped_session(orm.sessionmaker(bind=engine))

    datastore_test_helpers.clear_db(cls.Session)

    create_tables = [
        u'CREATE TABLE test_a (id_a text)',
        u'CREATE TABLE test_b (id_b text)',
        u'CREATE TABLE "TEST_C" (id_c text)',
        u'CREATE TABLE test_d ("?/?" integer)',
    ]
    for create_table_sql in create_tables:
        cls.Session.execute(create_table_sql)
def __init__(self, db_url):
    """
    Initialize the Peekaboo database handler.

    :param db_url: An RFC 1738 URL that points to the database.
    """
    self.__engine = create_engine(db_url)
    self.__db_con = None
    session_factory = sessionmaker(bind=self.__engine)
    self.__Session = scoped_session(session_factory)
    self.__lock = threading.RLock()

    try:
        self.__db_con = self.__engine.connect()
    except SQLAlchemyError as e:
        raise PeekabooDatabaseError(
            'Unable to connect to the database: %s' % e
        )

    if not self.__db_con.dialect.has_table(self.__engine, '_meta'):
        self._init_db()
        logger.debug('Database schema created.')
    else:
        self.clear_in_progress()
def __init__(self, init=None):
    if init is None:
        init = {}

    def callback():
        return scoped_session(
            sessionmaker(class_=RoutedSession, registry=dbs)
        )

    self._session_factory = callback
    self.storage = threading.local()
    self.storage.lazies = {}
    self.storage.concrete = {}
    self.storage.metadatas = {}

    self.lazies.update(init)
def configure_orm(disable_connection_pool=False):
    log.debug("Setting up DB connection pool (PID %s)" % os.getpid())
    global engine
    global Session
    engine_args = {}

    pool_connections = conf.getboolean('core', 'SQL_ALCHEMY_POOL_ENABLED')
    if disable_connection_pool or not pool_connections:
        engine_args['poolclass'] = NullPool
    elif 'sqlite' not in SQL_ALCHEMY_CONN:
        # Engine args not supported by sqlite
        engine_args['pool_size'] = conf.getint('core', 'SQL_ALCHEMY_POOL_SIZE')
        engine_args['pool_recycle'] = conf.getint('core', 'SQL_ALCHEMY_POOL_RECYCLE')

    engine = create_engine(SQL_ALCHEMY_CONN, **engine_args)
    reconnect_timeout = conf.getint('core', 'SQL_ALCHEMY_RECONNECT_TIMEOUT')
    setup_event_handlers(engine, reconnect_timeout)

    Session = scoped_session(
        sessionmaker(autocommit=False, autoflush=False, bind=engine))
def create_scoped_session(self, options=None):
    """Create a :class:`~sqlalchemy.orm.scoping.scoped_session`
    on the factory from :meth:`create_session`.

    An extra key ``'scopefunc'`` can be set on the ``options`` dict to
    specify a custom scope function. If it's not provided, Flask's app
    context stack identity is used. This will ensure that sessions are
    created and removed with the request/response cycle, and should be
    fine in most cases.

    :param options: dict of keyword arguments passed to session class in
        ``create_session``
    """
    if options is None:
        options = {}

    scopefunc = options.pop('scopefunc', _app_ctx_stack.__ident_func__)
    options.setdefault('query_cls', self.Query)
    return orm.scoped_session(
        self.create_session(options), scopefunc=scopefunc
    )
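The docstring above describes passing a custom scope function so sessions are keyed to something other than the current thread. The same idea works with plain scoped_session outside Flask; the sketch below is illustrative only, with a hypothetical current_request_id context variable standing in for a real request identity.

# Sketch: scoping sessions by a contextvars token instead of by thread.
# 'current_request_id' is a hypothetical stand-in for a real request identity.
import contextvars

from sqlalchemy import create_engine
from sqlalchemy.orm import scoped_session, sessionmaker

current_request_id = contextvars.ContextVar('current_request_id', default='no-request')

engine = create_engine('sqlite://')
Session = scoped_session(sessionmaker(bind=engine),
                         scopefunc=current_request_id.get)

current_request_id.set('request-1')
first = Session()        # session keyed to 'request-1'
current_request_id.set('request-2')
second = Session()       # a different session, keyed to 'request-2'
assert first is not second
Session.remove()         # removes only the session for the current scope key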
def __init__(self, database_url):
    """Set up SQL Alchemy with the database at 'database_url'

    database_url can be e.g. 'sqlite:////home/bob/database.db'
    """
    self.engine = create_engine(database_url, convert_unicode=True)
    self.session = scoped_session(sessionmaker(autocommit=False,
                                               autoflush=False,
                                               bind=self.engine))
    global Base
    Base.query = self.session.query_property()
    self.setup_relationships()
    Base.metadata.create_all(self.engine)
    if len(self.session.query(User).all()) == 0:
        self.add_user('admin', 'admin', 'admin', 'admin', True)
def get_db_session():
    """Provide a transactional scope around a series of operations."""
    global session
    with lock:
        if is_sqlite:
            with session.no_autoflush:
                yield session
        else:
            try:
                sx_factory = sessionmaker(bind=engine, expire_on_commit=False)
                _sx = scoped_session(sx_factory)
                sx = _sx()
                yield sx
                sx.commit()
            except:
                sx.rollback()
                raise
            finally:
                sx.expunge_all()
                sx.close()
def configure_connection(connection, create_tables=True):
    if isinstance(connection, basestring):
        try:
            eng = create_engine(connection)
        except exc.ArgumentError:
            eng = SQLiteConnectionRecipe(connection)()
    elif isinstance(connection, Connectable):
        eng = connection
    elif isinstance(connection, ConnectionRecipe):
        eng = connection()
    elif isinstance(connection, scoped_session):
        eng = connection.get_bind()
    else:
        raise ValueError(
            "Could not determine how to get a database connection from %r" % connection)
    if create_tables:
        Base.metadata.create_all(bind=eng)
    return eng
def _index_preshot(self):
    logger.info('Index preshot')
    # BEGIN DB update
    conn = engine.connect()
    data_sess = scoped_session(
        sessionmaker(autocommit=False, autoflush=False, bind=conn)
    )
    for acct in data_sess.query(Account).all():
        end_bal = acct.balance.ledger
        pctrange = float(end_bal) * 0.1
        for i in range(1, 30):
            dt = dtnow() - timedelta(days=i)
            b = end_bal + Decimal(uniform(-1 * pctrange, pctrange))
            b = Decimal(b.quantize(Decimal('.001'), rounding=ROUND_HALF_UP))
            acct.set_balance(ledger=b, ledger_date=dt, overall_date=dt)
    data_sess.flush()
    data_sess.commit()
    data_sess.close()
    conn.close()
    # END DB update
    self.get('/')
    sleep(1)
def _index_postshot(self):
    logger.info('Index postshot')
    conn = engine.connect()
    data_sess = scoped_session(
        sessionmaker(autocommit=False, autoflush=False, bind=conn)
    )
    for acct_id in [2, 5]:
        acct = data_sess.query(Account).get(acct_id)
        s = acct.ofx_statement
        s.as_of = dtnow()
        data_sess.add(s)
    data_sess.flush()
    data_sess.commit()
    data_sess.close()
    conn.close()
    logger.info('Done updating DB (index postshot)')
def _balance_postshot(self):
    logger.info('Balance postshot')
    conn = engine.connect()
    data_sess = scoped_session(
        sessionmaker(autocommit=False, autoflush=False, bind=conn)
    )
    for t in data_sess.query(Transaction).filter(
        Transaction.id.__ge__(4)
    ).all():
        data_sess.delete(t)
    data_sess.flush()
    data_sess.commit()
    for b in data_sess.query(Budget).filter(
        Budget.id.__ge__(7)
    ).all():
        data_sess.delete(b)
    data_sess.flush()
    data_sess.commit()
    data_sess.close()
    conn.close()
    logger.info('Done updating DB')
def _refreshdb(self):
    """
    Refresh/Load DB data before tests
    """
    if 'NO_REFRESH_DB' in os.environ:
        logger.info('Skipping session-scoped DB refresh')
        return
    # setup the connection
    conn = engine.connect()
    logger.info('Refreshing DB (session-scoped)')
    # clean the database
    biweeklybudget.models.base.Base.metadata.reflect(engine)
    biweeklybudget.models.base.Base.metadata.drop_all(engine)
    biweeklybudget.models.base.Base.metadata.create_all(engine)
    # load the sample data
    data_sess = scoped_session(
        sessionmaker(autocommit=False, autoflush=False, bind=conn)
    )
    SampleDataLoader(data_sess).load()
    data_sess.flush()
    data_sess.commit()
    data_sess.close()
    conn.close()
    logger.info('DB refreshed.')
def _update_db(self):
    conn = engine.connect()
    logger.info('Updating DB')
    data_sess = scoped_session(
        sessionmaker(autocommit=False, autoflush=False, bind=conn)
    )
    for t in data_sess.query(OFXTransaction).filter(
        OFXTransaction.account_id.__eq__(1)
    ).all():
        if t.reconcile is not None:
            continue
        data_sess.delete(t)
    data_sess.flush()
    data_sess.commit()
    data_sess.close()
    conn.close()
def test_store_data(self):
    config = json.loads(open('config.json').read())
    username = config['username']
    password = config['password']
    host = config['host']
    database = 'testdb'
    engine = create_engine('mysql://' + username + ':' + password + '@' + host + '/' + database)
    session = scoped_session(sessionmaker(autoflush=True, autocommit=False, bind=engine))
    schema.metadata.create_all(engine)

    data = [{"GAME_ID": u"asdsadg", "TEAM_ID": 12623}]
    utils.store_data(session, schema.advanced_boxscores_team, data)
    query = select([(schema.advanced_boxscores_team)])
    rows = len(session.execute(query).fetchall())
    session.remove()
    schema.metadata.drop_all(engine)
    self.assertEqual(len(data), rows)
def __init__(self, connection=None, echo=False):
    """
    :param str connection: SQLAlchemy connection string
    :param bool echo: True or False for SQL output of SQLAlchemy engine
    """
    log.setLevel(logging.INFO)
    handler = logging.FileHandler(os.path.join(PYCTD_DIR, defaults.TABLE_PREFIX + 'database.log'))
    handler.setLevel(logging.INFO)
    formatter = logging.Formatter('%(asctime)s - %(name)s - %(levelname)s - %(message)s')
    handler.setFormatter(formatter)
    log.addHandler(handler)

    try:
        self.connection = get_connection_string(connection)
        self.engine = create_engine(self.connection, echo=echo)
        self.inspector = reflection.Inspector.from_engine(self.engine)
        self.sessionmaker = sessionmaker(bind=self.engine, autoflush=False, expire_on_commit=False)
        self.session = scoped_session(self.sessionmaker)()
    except:
        self.set_connection_string_by_user_input()
        self.__init__()
def __init__(self):
    handlers = [
        (r"/user/regist", RegisterHandler),
        (r"/user/login", loginHandler),
        (r"/user/getprofile", Userinfo),
        (r"/user/editprofile", Usersetting),
        (r"/activity/publish", ActivityPublish),
        (r"/activity/ask", ActivityaskHandler),
        (r"/activity/operation01", Activitylike),
        (r"/activity/comment", Activitycomment),
        (r"/activity/search", Activitysearch),
    ]
    tornado.web.Application.__init__(self, handlers)
    # scoped_session wraps the sessionmaker in a thread-local registry,
    # so handlers can share self.db safely across requests.
    self.db = scoped_session(sessionmaker(bind=engine,
                                          autocommit=False,
                                          autoflush=True,
                                          expire_on_commit=False))
def init(db_url, db_log_flag=False, recycle_time=3600):
    # Create the engine
    DBManager.__engine = create_engine(db_url, pool_recycle=recycle_time, echo=db_log_flag)

    # DATABASE Not exist case
    if not database_exists(DBManager.__engine.url):
        # Create the database
        create_database(DBManager.__engine.url)
        DBManager.__engine = create_engine(db_url, pool_recycle=recycle_time, echo=db_log_flag)

    DBManager.__session = scoped_session(sessionmaker(autocommit=False,
                                                      autoflush=False,
                                                      bind=DBManager.__engine))

    # Expose the session through the module-level dao handle
    global dao
    dao = DBManager.__session
def setup_session(self):
    db_string = pyfusion.config.get('global', 'database')
    if db_string.lower() != 'none':
        from sqlalchemy import create_engine, MetaData
        from sqlalchemy.orm import scoped_session, sessionmaker, clear_mappers
        self.engine = create_engine(pyfusion.config.get('global', 'database'))
        self.Session = scoped_session(sessionmaker(autocommit=False,
                                                   autoflush=True,
                                                   bind=self.engine,
                                                   expire_on_commit=False))
        self.metadata = MetaData()
        self.metadata.bind = self.engine
        self.metadata.create_all()
        self.IS_ACTIVE = True
        # for use in tests
        self.clear_mappers = clear_mappers
def init_db():
    """Initialize function for the database. """
    global _scoped_session
    global _session_cls
    global _engine
    global OrmModelBase

    _engine = create_engine(connect_string(),
                            pool_size=configuration.database.pool_size,
                            echo=configuration.database.echo)
    _session_cls = sessionmaker(autocommit=False, autoflush=False, bind=_engine)
    _scoped_session = scoped_session(_session_cls)

    import uchan.lib.ormmodel
def _setup_connection(self):
    """Ensure database is ready to fly."""
    global Session  # pylint: disable=global-statement

    import homeassistant.components.recorder.models as models
    from sqlalchemy import create_engine
    from sqlalchemy.orm import scoped_session
    from sqlalchemy.orm import sessionmaker

    if self.db_url == 'sqlite://' or ':memory:' in self.db_url:
        from sqlalchemy.pool import StaticPool
        self.engine = create_engine(
            'sqlite://',
            connect_args={'check_same_thread': False},
            poolclass=StaticPool)
    else:
        self.engine = create_engine(self.db_url, echo=False)

    models.Base.metadata.create_all(self.engine)
    session_factory = sessionmaker(bind=self.engine)
    Session = scoped_session(session_factory)
    self.db_ready.set()
def configure_dbsession(config):
    from snovault import DBSESSION
    settings = config.registry.settings
    DBSession = settings.pop(DBSESSION, None)
    if DBSession is None:
        engine = configure_engine(settings)

        if asbool(settings.get('create_tables', False)):
            from snovault.storage import Base
            Base.metadata.create_all(engine)

        import snovault.storage
        import zope.sqlalchemy
        from sqlalchemy import orm
        DBSession = orm.scoped_session(orm.sessionmaker(bind=engine))
        zope.sqlalchemy.register(DBSession)
        snovault.storage.register(DBSession)

    config.registry[DBSESSION] = DBSession
def __wrapped_call__(self, model, session, *values, create=True, **named_values_and_defaults):
    assert session is None or isinstance(session, (
        Session, scoped_session)), 'If provided, session should be an sqlalchemy session object.'
    # compute filter values (includes call to related models)
    values, defaults = self.apply_filters(self.filters, values, create=create,
                                          session=session, **named_values_and_defaults)
    # get cache dictionary and key
    cache = _get_cache_dict(model, session)
    cache_key = (model,) + tuple(values.items())
    # if no cache, delegate to real (decorated) Getter method
    if cache_key not in cache:
        # wrapped method call
        cache[cache_key] = self.getter(model, session, values) if session else None
        # store cache
        if cache[cache_key] is None:
            cache[cache_key] = model(**values, **defaults) if create else None
        # tie object to session, if available
        if cache[cache_key] and session:
            session.add(cache[cache_key])
    return cache[cache_key]
def rowmethod(f):
    @functools.wraps(f)
    def method(cls_or_self, session, *args, **kwargs):
        if not isinstance(session, (Session, scoped_session)):
            raise ValueError('Model methods must take a session as second argument, '
                             'got {}.'.format(repr(session)))
        return f(cls_or_self, session, *args, **kwargs)
    return method
def create_sessionmaker(self, engine):
    def sessionmaker():
        return scoped_session(
            sqlalchemy.orm.sessionmaker(bind=engine, **self.sessionmaker_options)
        )
    return sessionmaker
def test_session_builder():
    db = MemoryDatabase()
    with db() as session:
        assert isinstance(session, scoped_session)
def make_session(transaction=True, autoflush=False, autocommit=False):
    # Yeah the arguments and their naming is so terrible. sorry
    config = ConfigParser.ConfigParser()
    config.read('production.ini')
    db_url = os.environ.get("FANTASYDOTA_DB")
    engine = create_engine(db_url, echo=False)
    if transaction:
        DBSession = scoped_session(sessionmaker(extension=ZopeTransactionExtension()))
    else:
        DBSession = sessionmaker(autoflush=autoflush, autocommit=autocommit)
    DBSession.configure(bind=engine)
    Base.metadata.bind = engine
    Base.metadata.create_all(engine)
    session = DBSession()
    return session
def create_scoped_session(self, options=None):
    """Helper factory method that creates a scoped session. It
    internally calls :meth:`create_session`.
    """
    if options is None:
        options = {}
    scopefunc = options.pop('scopefunc', None)
    return orm.scoped_session(
        partial(self.create_session, options), scopefunc=scopefunc
    )
def get_session(config, scoped=False, engine=None, path=None):
    if not engine:
        engine = get_engine(config, path=path)

    session_factory = sessionmaker(
        bind=engine,
        autoflush=True,
        autocommit=False,
    )

    if scoped:
        ScopedSession = scoped_session(session_factory)
        return ScopedSession
    else:
        return session_factory
def get_session(self, force=False):
    if self.session and not force:
        return self.session
    session_factory = sessionmaker(bind=self.get_metadata(force=force).bind.engine)
    Session = scoped_session(session_factory)
    self.session = session = Session()
    return session
def __init__(self):
    self.engine = create_engine(args.db, echo=args.debug)
    self.session = scoped_session(sessionmaker(bind=self.engine))
    self.Model = declarative_base(bind=self.engine)