The following 49 code examples, extracted from open-source Python projects, illustrate how to use sqlalchemy.orm.sessionmaker().
def __init__(self):
    """Open a DB session for offset bookkeeping.

    Builds an engine from the configured connection string, reflects
    the existing tables onto ``Base``, and keeps a bound session plus
    the configured cap on retained offset revisions.
    """
    # READ UNCOMMITTED isolation, per the original configuration.
    engine = create_engine(DbUtil.get_python_db_connection_string(),
                           isolation_level="READ UNCOMMITTED")
    if cfg.CONF.service.enable_debug_log_entries:
        engine.echo = True
    # Reflect the already-existing tables into the declarative base.
    Base.prepare(engine, reflect=True)
    self.session = sessionmaker(bind=engine)()
    # Keep these many offset versions around.
    self.MAX_REVISIONS = cfg.CONF.repositories.offsets_max_revisions
def __init__(self, engine, metadata=None, keyword='db', commit=True,
             create=False, use_kwargs=False, create_session=None):
    '''
    :param engine: SQLAlchemy engine created with `create_engine` function
    :param metadata: SQLAlchemy metadata. It is required only if `create=True`
    :param keyword: Keyword used to inject session database in a route
    :param create: If it is true, execute `metadata.create_all(engine)`
        when plugin is applied
    :param commit: If it is true, commit changes after route is executed.
    :param use_kwargs: plugin inject session database even if it is not
        explicitly defined, using **kwargs argument if defined.
    :param create_session: SQLAlchemy session maker created with the
        'sessionmaker' function. Will create its own if undefined.
    '''
    # Fall back to a default sessionmaker when none is supplied.
    if create_session is None:
        create_session = sessionmaker()
    self.engine = engine
    self.metadata = metadata
    self.create_session = create_session
    self.keyword = keyword
    self.create = create
    self.commit = commit
    self.use_kwargs = use_kwargs
def select_db(self, tables, column, page=0, page_size=None, **value_column):
    """Equivalent to ``SELECT * FROM tables WHERE column = value_column``.

    Does not support ORDER BY or joined tables.

    :param tables: mapped class to query
    :param column: column to filter on when ``value`` is given
    :param page: zero-based page index (only applied with a page size)
    :param page_size: rows per page, or None for no paging
    :returns: list of result rows, or False on any error
    """
    s = None
    try:
        Session = sessionmaker(bind=engine)
        Base.metadata.create_all(engine)
        s = Session()
        if 'value' in value_column:
            query = s.query(tables).filter(column == value_column['value'])
        else:
            query = s.query(tables)
        if page_size is not None:   # identity test, not '!= None'
            query = query.limit(page_size)
            # Offset only makes sense with a page size; the original
            # computed page * None (TypeError) when page_size was unset.
            if page != 0:
                query = query.offset(page * page_size)
        return list(s.execute(query))
    except Exception:
        # Preserve the original contract: any failure yields False.
        return False
    finally:
        # Always release resources (original leaked them on error).
        if s is not None:
            s.close()
        engine.dispose()
def select_db(self, tables, column, page=0, page_size=None, **value_column):
    """Equivalent to ``SELECT * FROM tables WHERE column = value_column``.

    Does not support ORDER BY or joined tables.

    :param tables: mapped class to query
    :param column: column to filter on when ``value`` is given
    :param page: zero-based page index (only applied with a page size)
    :param page_size: rows per page, or None for no paging
    :returns: list of result rows, or False on any error
    """
    s = None
    try:
        Session = sessionmaker(bind=self.engine)
        self.Base.metadata.create_all(self.engine)
        s = Session()
        if 'value' in value_column:
            query = s.query(tables).filter(column == value_column['value'])
        else:
            query = s.query(tables)
        if page_size is not None:   # identity test, not '!= None'
            query = query.limit(page_size)
            # Offset only makes sense with a page size; the original
            # computed page * None (TypeError) when page_size was unset.
            if page != 0:
                query = query.offset(page * page_size)
        return list(s.execute(query))
    except Exception:
        # Preserve the original contract: any failure yields False.
        return False
    finally:
        if s is not None:
            s.close()
        # BUG FIX: the original disposed a global 'engine'; this class
        # binds to self.engine everywhere else.
        self.engine.dispose()
@classmethod
def _accept_with(cls, target):
    """Resolve *target* to the Session class whose events should fire.

    A scoped_session is unwrapped to its factory first; a sessionmaker
    maps to its session class; Session subclasses and instances pass
    through; anything else yields None.
    """
    if isinstance(target, scoped_session):
        target = target.session_factory
        # The factory must be a sessionmaker or a Session subclass.
        acceptable = isinstance(target, sessionmaker) or (
            isinstance(target, type) and issubclass(target, Session))
        if not acceptable:
            raise exc.ArgumentError(
                "Session event listen on a scoped_session "
                "requires that its creation callable "
                "is associated with the Session class.")
    if isinstance(target, sessionmaker):
        return target.class_
    elif isinstance(target, type):
        # scoped_session subclasses must be checked before Session.
        if issubclass(target, scoped_session):
            return Session
        elif issubclass(target, Session):
            return target
    elif isinstance(target, Session):
        return target
    else:
        return None
def get(self):
    """Insert a dummy Query row to exercise the database connection."""
    from sqlalchemy import create_engine
    from sqlalchemy.orm import sessionmaker, scoped_session
    engine = create_engine(constant.DB_ENGINE)
    db_session = sessionmaker(bind=engine)()
    record = Query(origin_lng='123',
                   origin_lat='123',
                   destination_lng='123',
                   destination_lat='123',
                   created_at=datetime.date.today())
    db_session.add(record)
    db_session.commit()
def create_bootstrap_project(name, project_id=None, db_uri=None):
    """Creates a new project.

    :param name: Name of the new project
    :param project_id: optional explicit id; a UUID4 is generated otherwise
    :param db_uri: database connection string
    :returns: the existing project with this name, or the newly added one
    """
    if not project_id:
        project_id = str(uuid.uuid4())
    engine = create_engine(db_uri)
    session = sessionmaker(bind=engine)()
    # Candidate row, used only if no project with this name exists yet.
    project = models.Project(name=name, id=project_id)
    try:
        project = session.query(models.Project).filter_by(name=name).one()
    except sa_exc.NoResultFound:
        session.add(project)
        session.commit()
    return project
def session_scope(cls, db_engine=None, session_class=None):
    """
    Provide a scoped db session for a series of operarions. The
    session is created immediately before the scope begins, and is
    closed on scope exit. Commits on success, rolls back on error.

    :param db_engine: SQLAlchemy Engine or other Connectable
    :type db_engine: sqlalchemy.engine.Connectable
    :param session_class: SQLAlchemy Session
    :type session_class: sqlalchemy.orm.Session
    """
    if session_class is None:
        session_class = sessionmaker(bind=db_engine)
    scoped = session_class()
    try:
        yield scoped
    except Exception:
        scoped.rollback()
        raise
    else:
        scoped.commit()
    finally:
        scoped.close()
def check_and_clear():
    """Purge unregistered Instance rows older than DBTTL days."""
    Session = scoped_session(sessionmaker(bind=dbengine))
    dbsession = Session()
    log.info("db check_and_clear started with options {} {}".format(MAX_INST, DBTTL))
    # NOTE(review): 'timedelta.relativedelta' suggests dateutil is
    # imported under the name 'timedelta' -- confirm at the import site.
    oldtime = datetime.datetime.today() - timedelta.relativedelta(days=DBTTL)
    log.info("looking for unregistered records older than {}".format(oldtime))
    # Query.delete() returns the number of rows matched (an int), so
    # the original len(inst) after commit raised TypeError.
    deleted = dbsession.query(Instance).filter(
        Instance.state != 'added',
        Instance.created_at <= oldtime
    ).delete()
    try:
        dbsession.commit()
        log.info("{} records removed from db".format(deleted))
    except Exception as e:
        log.error(repr(e))
        dbsession.rollback()
    dbsession.bind.dispose()
    Session.remove()
def create_instance(iname, ip, uuid):
    """Insert a new Instance row, or refresh an existing 'added' one."""
    Session = scoped_session(sessionmaker(bind=dbengine))
    dbsession = Session()
    existing = dbsession.query(Instance).filter(
        Instance.name == iname,
        Instance.state != 'added').first()
    if existing:
        # Refresh the registered record in place.
        existing.state = 'added'
        existing.dns_domain = DNS_CONF["domain"]
        existing.uuid = uuid
        existing.ip = ip
        existing.updated_at = datetime.datetime.now()
    else:
        log.warning("instance {} was not found in db".format(iname))
        record = Instance(iname, uuid, ip)
        dbsession.add(record)
        record.state = 'added'
        record.dns_domain = DNS_CONF["domain"]
    try:
        dbsession.commit()
    except Exception as e:
        log.error(repr(e))
        dbsession.rollback()
    dbsession.bind.dispose()
    Session.remove()
def delete_instance(iname, uuid):
    """Mark the named Instance row as deleted (soft delete)."""
    Session = scoped_session(sessionmaker(bind=dbengine))
    dbsession = Session()
    record = dbsession.query(Instance).filter(Instance.name == iname).first()
    if record:
        record.state = 'deleted'
        record.deleted_at = datetime.datetime.now()
    else:
        log.warning("instance {} was not found in db".format(iname))
    try:
        dbsession.commit()
    except Exception as e:
        log.error(repr(e))
        dbsession.rollback()
    dbsession.bind.dispose()
    Session.remove()
def retrieve_model_id_from_hash(db_engine, model_hash):
    """Retrieves a model id from the database that matches the given hash

    Args:
        db_engine (sqlalchemy.engine) A database engine
        model_hash (str) The model hash to lookup

    Returns: (int) The model id (if found in DB), None (if not)
    """
    db_session = sessionmaker(bind=db_engine)()
    try:
        match = db_session.query(Model) \
            .filter_by(model_hash=model_hash) \
            .one_or_none()
        if match:
            return match.model_id
        return None
    finally:
        db_session.close()
def __init__(self, project_path, experiment_hash, model_storage_engine,
             db_engine, model_group_keys, replace=True):
    """Store trainer configuration and prepare a session factory.

    Keeps references to the project path, experiment hash, storage and
    DB engines, the model-group keys, and the replace flag.
    """
    self.project_path = project_path
    self.experiment_hash = experiment_hash
    self.model_storage_engine = model_storage_engine
    self.db_engine = db_engine
    # Factory for sessions bound to this trainer's engine.
    self.sessionmaker = sessionmaker(bind=self.db_engine)
    self.model_group_keys = model_group_keys
    self.replace = replace
def fake_trained_model(project_path, model_storage_engine, db_engine, train_matrix_uuid='efgh'):
    """Creates and stores a trivial trained model

    Args:
        project_path (string) a desired fs/s3 project path
        model_storage_engine (triage.storage.ModelStorageEngine)
        db_engine (sqlalchemy.engine)

    Returns: (int) model id for database retrieval
    """
    trained_model = MockTrainedModel()
    model_storage_engine.get_store('abcd').write(trained_model)
    session = sessionmaker(db_engine)()
    try:
        db_model = Model(model_hash='abcd', train_matrix_uuid=train_matrix_uuid)
        session.add(db_model)
        session.commit()
        # Read the generated id while the session is still open:
        # commit expires instance state and a closed session cannot
        # refresh it.
        model_id = db_model.model_id
    finally:
        # The original never closed the session (connection leak).
        session.close()
    return trained_model, model_id
def fake_trained_model(project_path, model_storage_engine, db_engine, train_matrix_uuid='efgh'):
    """Creates and stores a trivial trained model

    Args:
        project_path (string) a desired fs/s3 project path
        model_storage_engine (catwalk.storage.ModelStorageEngine)
        db_engine (sqlalchemy.engine)

    Returns: (int) model id for database retrieval
    """
    trained_model = MockTrainedModel()
    model_storage_engine.get_store('abcd').write(trained_model)
    session = sessionmaker(db_engine)()
    try:
        db_model = Model(model_hash='abcd', train_matrix_uuid=train_matrix_uuid)
        session.add(db_model)
        session.commit()
        # Read the generated id while the session is still open:
        # commit expires instance state and a closed session cannot
        # refresh it.
        model_id = db_model.model_id
    finally:
        # The original never closed the session (connection leak).
        session.close()
    return trained_model, model_id
def fake_trained_model(project_path, model_storage_engine, db_engine):
    """Creates and stores a trivial trained model

    Args:
        project_path (string) a desired fs/s3 project path
        model_storage_engine (triage.storage.ModelStorageEngine)
        db_engine (sqlalchemy.engine)

    Returns: (int) model id for database retrieval
    """
    trained_model = MockTrainedModel()
    model_storage_engine.get_store('abcd').write(trained_model)
    session = sessionmaker(db_engine)()
    try:
        db_model = Model(model_hash='abcd')
        session.add(db_model)
        session.commit()
        # Read the generated id while the session is still open:
        # commit expires instance state and a closed session cannot
        # refresh it.
        model_id = db_model.model_id
    finally:
        # The original never closed the session (connection leak).
        session.close()
    return trained_model, model_id
def __init__(self, dbfile=DB_FILE, autocommit=False, dictrows=True, **kwargs):
    """Create the sqlite engine, schema, and a thread-local session factory.

    :param dbfile: path to the sqlite database file
    :param autocommit: stored for later use by callers of this object
    :param dictrows: stored for later use by callers of this object
    """
    self.dbfile = dbfile
    self.autocommit = autocommit
    self.dictrows = dictrows
    self.path = "sqlite:///{0}".format(self.dbfile)
    # Echo SQL statements only when tracing is enabled.
    self.engine = create_engine(self.path, echo=True if TRACE else False)
    # http://docs.sqlalchemy.org/en/latest/orm/contextual.html
    self.handle = sessionmaker(bind=self.engine)
    self.handle = scoped_session(self.handle)
    self._session = None
    self._tx_count = 0
    Base.metadata.create_all(self.engine)
    logger.debug('database path: {}'.format(self.path))
    self.clear_memcache()
def connect(self):
    """Build the connection string and open engine, connection and session.

    Returns None in all cases; logs and bails out early when no
    connection string could be built.
    """
    self.build_connection_string()
    # Identity test against None ('== None' is unidiomatic and can be
    # hijacked by a custom __eq__).
    if self.m_connection_str is None:
        self.lg("Not connecting to this database", 0)
        return None
    self.lg("Connecting to databases(" + str(self.m_connection_str)
            + ") Autocommit(" + str(self.m_autocommit)
            + ") Autoflush(" + str(self.m_autoflush) + ")", 7)
    # Removed an unused local 'Base = declarative_base()' that was
    # never referenced afterwards.
    self.m_engine = create_engine(self.m_connection_str, echo=False)
    self.m_connection = self.m_engine.connect()
    self.m_session = scoped_session(sessionmaker(autocommit=self.m_autocommit,
                                                 autoflush=self.m_autoflush,
                                                 bind=self.m_engine))
    self.lg("Connected to DB(" + str(self.m_name) + ") DBTables("
            + str(self.m_database_name) + ")", 7)
    return None
# end of connect
def init_db(app):
    """Initialize the database layer for *app*.

    Reads the connection string from ``app.config['DATABASE']``, binds a
    scoped session and query property onto ``Base``, and creates all
    declared tables.

    :param app: application object exposing ``config.get('DATABASE')``
    :return: None
    """
    database_config = app.config.get('DATABASE')
    engine = create_engine(database_config, convert_unicode=True, echo=False)
    db_session = scoped_session(sessionmaker(autocommit=False, autoflush=False, bind=engine))
    # Expose the session and a query property on the declarative base so
    # models can do Model.query / Base.db_session directly.
    Base.db_session = db_session
    Base.query = db_session.query_property()
    Base.metadata.create_all(bind=engine)
def load_session(db_path):
    """Load and return a new SQLalchemy session and engine.

    Parameters
    ----------
    db_path : str
        Path to desired database location, can be relative or use tilde
        to specify the user $HOME.

    Returns
    -------
    session : sqlalchemy.orm.session.Session
        Session instance.
    engine : sqlalchemy.engine.Engine
        Engine instance.
    """
    absolute = path.abspath(path.expanduser(db_path))
    engine = create_engine("sqlite:///" + absolute, echo=False)
    # It is very important that autoflush stays False: if "treatments"
    # or "measurements" entries precede "external_ids", the latter would
    # otherwise insert a null on the animal_id column.
    session = sessionmaker(bind=engine, autoflush=False)()
    Base.metadata.create_all(engine)
    return session, engine
def initialise_database(self, primary_files, secondary_files, pg_functions=None, pg_triggers=None):
    """
    :param primary_files: SQL files to execute in first pass, before object creation
    :type primary_files: list of string
    :param secondary_files: SQL files to execute in second pass, after object creation
    :type secondary_files: list of string
    :type pg_functions: list
    :type pg_triggers: list
    """
    # None defaults replace the original mutable default arguments
    # ([] shared across calls); passing lists behaves as before.
    if pg_functions is None:
        pg_functions = []
    if pg_triggers is None:
        pg_triggers = []
    session = sessionmaker(bind=self.engine)()
    # Isolation level 0 (autocommit) so DDL from the SQL files applies
    # immediately on the raw connection.
    session.connection().connection.set_isolation_level(0)
    for sql_file in primary_files:
        execute(session, join(self.sql_dir, sql_file))
    self.orm_base.metadata.create_all(self.engine)
    for pg_func in pg_functions:
        self.engine.execute(pg_func)
    for pg_trigger in pg_triggers:
        self.engine.execute(pg_trigger)
    # Record the schema revision so alembic sees the DB as migrated.
    command.stamp(self.alembic_config(), self.alembic_version)
    for sql_file in secondary_files:
        execute(session, join(self.sql_dir, sql_file))
def create_session(db_string, drop_tables=False):
    """
    Creates a new DB session using the scoped_session that SQLAlchemy
    provides.

    :param db_string: The connection string.
    :type db_string: str
    :param drop_tables: Drop existing tables?
    :type drop_tables: bool
    :return: A SQLAlchemy session object
    :rtype: sqlalchemy.orm.scoped_session
    """
    global db_engine, Base
    db_engine = create_engine(db_string, convert_unicode=True)
    db_session = scoped_session(sessionmaker(bind=db_engine))
    Base.query = db_session.query_property()
    # Optionally wipe the schema before (re)creating it.
    if drop_tables:
        Base.metadata.drop_all(bind=db_engine)
    Base.metadata.create_all(bind=db_engine)
    return db_session
def __init__(self, app, dburi, search):
    """Create the engine, run migrations, and set up a scoped session."""
    self.log = logging.getLogger('boartty.db')
    self.dburi = dburi
    self.search = search
    self.engine = create_engine(self.dburi)
    metadata.create_all(self.engine)
    self.migrate(app)
    # If we want the objects returned from query() to be usable outside
    # of the session, we need to expunge them from the session, and
    # since the DatabaseSession always calls commit() on the session
    # when the context manager exits, we need to inform the session to
    # expire objects when it does so.
    self.session_factory = sessionmaker(bind=self.engine,
                                        expire_on_commit=False,
                                        autoflush=False)
    self.session = scoped_session(self.session_factory)
    self.lock = threading.Lock()
def gen_markov():  # pragma: no cover
    """Compile all the tweets and create a Markov chain."""
    host_url = os.environ.get('REDIS_URL')
    access_dict = {'sqlalchemy.url': os.environ.get('DATABASE_URL')}
    engine = get_engine(access_dict)
    session = sessionmaker(bind=engine)()
    try:
        tweets = session.query(Tweet).all()
    finally:
        # Release the DB connection (the original left it open).
        session.close()
    # str.join builds the corpus in linear time; the original '+=' loop
    # was quadratic in total corpus size.
    big_corpus = ''.join(tweet.tweet + '\n' for tweet in tweets)
    markov_chain = markovify.NewlineText(big_corpus, state_size=3)
    to_redis = pickle.dumps(markov_chain)
    redis.from_url(host_url).set('markov_tweets', to_redis)
def gen_markov():
    """Compile all the tweets and create a Markov chain."""
    host_url = os.environ.get('REDIS_URL')
    access_dict = {'sqlalchemy.url': os.environ.get('DATABASE_URL')}
    engine = get_engine(access_dict)
    session = sessionmaker(bind=engine)()
    try:
        tweets = session.query(Tweet).all()
    finally:
        # Release the DB connection (the original left it open).
        session.close()
    # str.join builds the corpus in linear time; the original '+=' loop
    # was quadratic in total corpus size.
    big_corpus = ''.join(tweet.tweet + '\n' for tweet in tweets)
    markov_chain = markovify.NewlineText(big_corpus, state_size=3)
    to_redis = pickle.dumps(markov_chain)
    redis.from_url(host_url).set('markov_tweets', to_redis)
def __init__(self, db_url):
    """
    Initialize the Peekaboo database handler.

    :param db_url: An RFC 1738 URL that points to the database.
    :raises PeekabooDatabaseError: if the connection cannot be opened
    """
    self.__engine = create_engine(db_url)
    self.__db_con = None
    self.__Session = scoped_session(sessionmaker(bind=self.__engine))
    self.__lock = threading.RLock()
    try:
        self.__db_con = self.__engine.connect()
    except SQLAlchemyError as e:
        raise PeekabooDatabaseError(
            'Unable to connect to the database: %s' % e
        )
    # A missing '_meta' table means a fresh database: build the schema.
    # Otherwise reset any rows left in-progress by a previous run.
    if self.__db_con.dialect.has_table(self.__engine, '_meta'):
        self.clear_in_progress()
    else:
        self._init_db()
        logger.debug('Database schema created.')
def _setup(self):
    """Create the engine (with driver-specific options), attach any
    registered engine event listeners, and build the session factory."""
    engine_kwargs = {}
    # use_native_unicode only applies to the psycopg2/postgresql driver.
    if self._database_url.get_driver_name() == 'postgresql':
        engine_kwargs['use_native_unicode'] = self._use_native_unicode
    if self._pool_size is not None:
        engine_kwargs['pool_size'] = self._pool_size
    self._engine = create_engine(self._database_url, **engine_kwargs)
    if self._engine_events:
        for name, listener in self._engine_events:
            event.listen(self._engine, name, listener)
    self._factory = sessionmaker()
    self._factory.configure(bind=self._engine)
def __init__(self, init=None):
    """Set up the session factory and per-thread storage.

    :param init: optional mapping merged into the lazy registry
        (NOTE(review): presumably name -> lazily-created resource;
        confirm against ``self.lazies``'s definition elsewhere).
    """
    if init is None:
        init = {}

    # Factory producing a scoped session of the project's RoutedSession
    # class; closes over the module-level 'dbs' registry.
    def callback():
        return scoped_session(
            sessionmaker(class_=RoutedSession, registry=dbs)
        )
    self._session_factory = callback
    # NOTE(review): attributes set on a threading.local here exist only
    # for the thread that runs __init__; other threads must re-create
    # them (likely handled by the 'lazies'/'concrete' properties --
    # confirm).
    self.storage = threading.local()
    self.storage.lazies = {}
    self.storage.concrete = {}
    self.storage.metadatas = {}
    self.lazies.update(init)
@classmethod
def create_session(cls, db='gis', engine=None):
    """
    Start a session.

    :param db: database name used only when a fallback engine must be
        created
    :param engine: engine to bind to; defaults to the module-level
        ENGINE, then to a freshly started one
    :return: a new bound Session
    """
    # If an engine is passed use it, else use the one at the top of
    # this module, else create a fallback engine.  The session
    # construction itself was duplicated in both branches of the
    # original; it is now written once.
    engine = engine if engine else ENGINE
    if not engine:
        engine = start_postgre_engine(db, False)
    Session = sessionmaker()
    Session.configure(bind=engine)
    return Session()
@classmethod
def _accept_with(cls, target):
    """Map *target* onto the Session class that should receive events."""
    if isinstance(target, orm.scoped_session):
        target = target.session_factory
        # Only a sessionmaker or a Session subclass is a valid factory.
        if not isinstance(target, orm.sessionmaker) and (
                not isinstance(target, type)
                or not issubclass(target, orm.Session)):
            raise exc.ArgumentError(
                "Session event listen on a scoped_session "
                "requires that its creation callable "
                "is associated with the Session class.")
    if isinstance(target, orm.sessionmaker):
        return target.class_
    if isinstance(target, type):
        # scoped_session subclasses take precedence over Session ones.
        if issubclass(target, orm.scoped_session):
            return orm.Session
        if issubclass(target, orm.Session):
            return target
    elif isinstance(target, orm.Session):
        return target
    return None
def __init__(self, data_path, models, encoding=None, markdown_config=None):
    """Constructor.

    Args:
        data_path: The full path to where the database files can be found.
        models: Loaded model/field data.
        encoding: The encoding to load files as ('utf-8', etc). If
            'None', will default to the system-preferred default encoding
        markdown_config: markdown settings passed through unchanged.
    """
    self.encoding = encoding
    self.data_path = data_path
    self.models = models
    self.markdown_config = markdown_config
    self.tables = dict()
    # Everything lives in an in-memory sqlite database.
    self.engine = create_engine('sqlite:///:memory:')
    self.Base = declarative_base()
    self.session = sessionmaker(bind=self.engine)()
    set_global('session', self.session)
    self.find_backrefs()
    self.create_db(models)
def __init__(self, database_url):
    """Set up SQL Alchemy with the database at 'database_path'

    database_url can be e.g. 'sqlite:////home/bob/database.db'
    """
    self.engine = create_engine(database_url, convert_unicode=True)
    self.session = scoped_session(sessionmaker(autocommit=False,
                                               autoflush=False,
                                               bind=self.engine))
    global Base
    Base.query = self.session.query_property()
    self.setup_relationships()
    Base.metadata.create_all(self.engine)
    # Seed a default admin account on first run.  first() asks the DB
    # for at most one row; the original len(query.all()) == 0 loaded
    # every user just to test emptiness.
    if self.session.query(User).first() is None:
        self.add_user('admin', 'admin', 'admin', 'admin', True)
def db_session(request, test_users, test_entries):
    """Create an in-memory sqlite database, add some test users and
    entries, and return an sqlalchemy session to it. Close the session
    when the test is finished with it.
    """
    engine = create_engine('sqlite:///:memory:')
    Session = sessionmaker(bind=engine)
    Base.metadata.create_all(engine)
    session = Session()
    # add_all accepts any iterable; the original wrapped dict values in
    # a redundant list comprehension.
    session.add_all(test_users.values())
    session.add_all(test_entries)
    # Bound method as finalizer replaces the one-line tearDown wrapper.
    request.addfinalizer(session.close)
    return session
def get_db_session():
    """Provide a transactional scope around a series of operations."""
    global session
    with lock:
        if is_sqlite:
            # sqlite path: hand out the shared module-level session.
            with session.no_autoflush:
                yield session
        else:
            # Build the session *before* the try block so the finally
            # clause can never hit an unbound 'sx' (the original left
            # sx undefined if sessionmaker/scoped_session raised).
            sx_factory = sessionmaker(bind=engine, expire_on_commit=False)
            sx = scoped_session(sx_factory)()
            try:
                yield sx
                sx.commit()
            except BaseException:
                # Explicit form of the original bare 'except:' -- also
                # rolls back on GeneratorExit/KeyboardInterrupt, then
                # re-raises.
                sx.rollback()
                raise
            finally:
                sx.expunge_all()
                sx.close()
def get_urban_by_id(agency_id, route_id):
    """Return the urban record for *route_id* as a dict ({} if absent).

    :param agency_id: agency whose database should be queried
    :param route_id: route identifier to match against Urban.route
    """
    database_name = _retrieve_database(agency_id)
    complete_db_name = _get_complete_database_name(database_name)
    engine = create_engine(complete_db_name)
    session = sessionmaker(bind=engine)()
    try:
        # .first() fetches at most one row instead of iterating the
        # full result and breaking after the first, and the session is
        # now closed on every path (the original leaked it on error).
        urb = session.query(Urban).filter(Urban.route == route_id).first()
        if urb is None:
            return {}
        return {"category": urb.category,
                "interdistance": urb.interdistance,
                "ratio": urb.ratio}
    finally:
        session.close()

# Functions for population table
def copy_event_from_stage(staged_event):
    """Materialize a staged event into a persisted Event record.

    Looks up the Place whose ocd_division_id matches the staged event,
    copies the staged fields onto a new Event, and saves it.

    NOTE(review): if no Place row matches, ``place`` is None and
    ``place.id`` below raises AttributeError -- confirm callers
    guarantee the Place exists.

    NOTE(review): ``save_record`` presumably persists via its own
    session; the session opened here is only used for the lookup and is
    never closed -- verify.

    :param staged_event: staged record providing the event fields
    :return: the saved Event as returned by save_record
    """
    engine = db_connect()
    Session = sessionmaker(bind=engine)
    session = Session()
    place = session.query(Place) \
        .filter(Place.ocd_division_id == staged_event.ocd_division_id).first()
    event = Event(
        ocd_division_id=staged_event.ocd_division_id,
        place_id=place.id,
        name=staged_event.name,
        scraped_datetime=staged_event.scraped_datetime,
        record_date=staged_event.record_date,
        source=staged_event.source,
        source_url=staged_event.source_url,
        meeting_type=staged_event.meeting_type
    )
    event = save_record(event)
    return event
def reflect_hints_db(db_path):
    """
    Reflect the database schema of the hints database, automapping the
    existing tables

    The NullPool is used to avoid concurrency issues with luigi. Using
    this activates pooling, but since sqlite doesn't really support
    pooling, what effectively happens is just that it locks the database
    and the other connections wait.

    :param db_path: path to hints sqlite database
    :return: sqlalchemy.MetaData object, sqlalchemy.orm.Session object
    """
    engine = sqlalchemy.create_engine('sqlite:///{}'.format(db_path),
                                      poolclass=NullPool)
    metadata = sqlalchemy.MetaData()
    metadata.reflect(bind=engine)
    # Automap generates ORM classes from the reflected tables.
    Base = automap_base(metadata=metadata)
    Base.prepare()
    session = sessionmaker(bind=engine)()
    tables = Base.classes
    return (tables.speciesnames, tables.seqnames, tables.hints,
            tables.featuretypes, session)