We extracted the following 50 code examples from open-source Python projects to illustrate how to use alembic.op.create_unique_constraint().
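Before the project examples, here is a minimal sketch of the call in a typical upgrade/downgrade pair. The `widget` table, its `name` column, and the constraint name `uq_widget_name` are hypothetical, chosen only to illustrate the signature; they do not come from any of the projects below.

from alembic import op


def upgrade():
    # add a named unique constraint covering one or more columns
    op.create_unique_constraint('uq_widget_name', 'widget', ['name'])


def downgrade():
    # drop it again by name, telling Alembic it is a unique constraint
    op.drop_constraint('uq_widget_name', 'widget', type_='unique')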
def batch_create_unique_constraint(
        cls, operations, constraint_name, columns, **kw):
    """Issue a "create unique constraint" instruction using the
    current batch migration context.

    The batch form of this call omits the ``source`` and ``schema``
    arguments from the call.

    .. seealso::

        :meth:`.Operations.create_unique_constraint`

    .. versionchanged:: 0.8.0 The following positional argument names
       have been changed:

        * name -> constraint_name

    """
    kw['schema'] = operations.impl.schema
    op = cls(
        constraint_name, operations.impl.table_name, columns,
        **kw
    )
    return operations.invoke(op)
def upgrade():
    # we dont retain historical data as we simply dont care yet
    op.execute('truncate table filecoverage')

    # ### commands auto generated by Alembic - please adjust! ###
    op.add_column('filecoverage', sa.Column(
        'build_id', zeus.db.types.guid.GUID(), nullable=False))
    op.create_index(op.f('ix_filecoverage_build_id'), 'filecoverage',
                    ['build_id'], unique=False)
    op.create_unique_constraint('unq_coverage_filname', 'filecoverage',
                                ['build_id', 'filename'])
    op.drop_constraint('unq_job_filname', 'filecoverage', type_='unique')
    op.drop_constraint('filecoverage_job_id_fkey', 'filecoverage',
                       type_='foreignkey')
    op.create_foreign_key(None, 'filecoverage', 'build',
                          ['build_id'], ['id'], ondelete='CASCADE')
    op.drop_column('filecoverage', 'job_id')
    # ### end Alembic commands ###
def upgrade():
    # ### commands auto generated by Alembic - please adjust! ###
    op.create_table('run',
        sa.Column('id', sa.Integer(), nullable=False),
        sa.Column('run_id', sa.String(length=30), nullable=True),
        sa.Column('library_reads_sequenced', sa.BIGINT(), nullable=True),
        sa.Column('total_num_bases', sa.BIGINT(), nullable=True),
        sa.Column('download_size', sa.BIGINT(), nullable=True),
        sa.Column('avg_read_length', sa.Float(), nullable=True),
        sa.Column('baseA_count', sa.BIGINT(), nullable=True),
        sa.Column('baseC_count', sa.BIGINT(), nullable=True),
        sa.Column('baseG_count', sa.BIGINT(), nullable=True),
        sa.Column('baseT_count', sa.BIGINT(), nullable=True),
        sa.Column('baseN_count', sa.BIGINT(), nullable=True),
        sa.Column('gc_percent', sa.Float(), nullable=True),
        sa.Column('run_quality_counts', sa.Text(), nullable=True),
        sa.Column('dataset_id', sa.Integer(), nullable=True),
        sa.ForeignKeyConstraint(['dataset_id'], ['dataset.id'], ),
        sa.PrimaryKeyConstraint('id')
    )
    op.create_unique_constraint(None, 'dataset', ['db_source_uid'])
    # ### end Alembic commands ###
def upgrade(): op.add_column("resource_type", sa.Column('tablename', sa.String(18), nullable=True)) resource_type = sa.Table( 'resource_type', sa.MetaData(), sa.Column('name', sa.String(255), nullable=False), sa.Column('tablename', sa.String(18), nullable=True) ) op.execute(resource_type.update().where( resource_type.c.name == "instance_network_interface" ).values({'tablename': op.inline_literal("'instance_net_int'")})) op.execute(resource_type.update().where( resource_type.c.name != "instance_network_interface" ).values({'tablename': resource_type.c.name})) op.alter_column("resource_type", "tablename", type_=sa.String(18), nullable=False) op.create_unique_constraint("uniq_resource_type0tablename", "resource_type", ["tablename"])
def upgrade(): """Upgrade the database to a newer revision.""" # ### commands auto generated by Alembic - please adjust! ### op.create_table('stacks', sa.Column('id', sa.Integer(), nullable=False), sa.Column('is_ref_stack', sa.Boolean(), nullable=False), sa.Column('stack_json', postgresql.JSONB(), nullable=False), sa.PrimaryKeyConstraint('id')) op.add_column('similar_stacks', sa.Column('analysis', postgresql.JSONB())) op.add_column('similar_stacks', sa.Column('similar_stack_id', sa.Integer(), nullable=False)) op.add_column('similar_stacks', sa.Column('similarity_value', sa.Float(), nullable=False)) op.add_column('similar_stacks', sa.Column('stack_id', sa.Integer(), nullable=False)) op.create_unique_constraint('sim_unique', 'similar_stacks', ['stack_id', 'similar_stack_id']) op.drop_constraint('similar_stacks_appstack_id_fkey', 'similar_stacks', type_='foreignkey') op.create_foreign_key(None, 'similar_stacks', 'stacks', ['stack_id'], ['id']) op.create_foreign_key(None, 'similar_stacks', 'stacks', ['similar_stack_id'], ['id']) op.drop_column('similar_stacks', 'dependency_list') op.drop_column('similar_stacks', 'appstack_id') op.drop_table('reference_stacks') op.drop_table('app_stacks') # ### end Alembic commands ###
def upgrade():
    table_prefix = context.config.get_main_option('table_prefix')
    op.create_unique_constraint('_repo_name_unique',
                                table_prefix + 'repos',
                                ['repo_name'])
    op.alter_column(table_prefix + 'repos', 'user_key',
                    type_=sa.UnicodeText(),
                    existing_type=sa.String(255))
def upgrade():
    table_prefix = context.config.get_main_option('table_prefix')
    op.create_unique_constraint('_env_id_rule_task_unique',
                                table_prefix + 'changes_whitelist',
                                ['env_id', 'rule', 'fuel_task'])
def upgrade():
    query = '''UPDATE "user" SET ckan_api=null
               WHERE id IN (SELECT id
                            FROM (SELECT id, row_number() over
                                      (partition BY ckan_api ORDER BY id) AS rnum
                                  FROM "user") t
                            WHERE t.rnum > 1);
            '''
    op.execute(query)
    op.create_unique_constraint('ckan_api_uq', 'user', ['ckan_api'])
def upgrade():
    # ### commands auto generated by Alembic - please adjust! ###
    op.alter_column('event', 'description',
                    existing_type=sa.VARCHAR(),
                    nullable=False)
    op.create_unique_constraint(None, 'event', ['description'])
    # ### end Alembic commands ###
def downgrade():
    # ### commands auto generated by Alembic - please adjust! ###
    op.add_column('event', sa.Column('title', sa.VARCHAR(), autoincrement=False, nullable=False))
    op.drop_constraint(None, 'event', type_='foreignkey')
    op.create_unique_constraint('event_title_key', 'event', ['title'])
    op.drop_column('event', 'location_id')
    op.drop_table('location')
    # ### end Alembic commands ###
def downgrade():
    ### commands auto generated by Alembic - please adjust! ###
    op.drop_index(op.f('ix_tag_name'), table_name='tag')
    op.drop_column('tag', 'name')
    op.create_unique_constraint('podcast_name_key', 'podcast', ['name'])
    op.create_unique_constraint('podcast_feed_key', 'podcast', ['feed'])
    ### end Alembic commands ###
def downgrade():
    ### commands auto generated by Alembic - please adjust! ###
    op.create_unique_constraint('episode_link_key', 'episode', ['link'])
    op.drop_index(op.f('ix_episode_link'), table_name='episode')
    op.alter_column('episode', 'title',
                    existing_type=sa.VARCHAR(),
                    nullable=True)
    ### end Alembic commands ###
def downgrade():
    ### commands auto generated by Alembic - please adjust! ###
    op.create_unique_constraint('episode_title_key', 'episode', ['title'])
    ### end Alembic commands ###
def upgrade():
    op.add_column('resource_class',
                  sa.Column('uuid', sa.String(length=36), nullable=False))
    op.create_unique_constraint('uniq_resource_class0uuid',
                                'resource_class', ['uuid'])
    op.drop_index('uniq_container0name', table_name='resource_class')
def upgrade():
    ### commands auto generated by Alembic - please adjust! ###
    op.add_column('tv_serie', sa.Column('year', sa.Date(), nullable=True))
    op.create_unique_constraint(None, 'tv_serie', ['id'])
    ### end Alembic commands ###
def upgrade():
    bind = op.get_bind()
    session = Session(bind=bind)
    ### commands auto generated by Alembic - please adjust! ###
    op.add_column('kubes', sa.Column('is_default', sa.Boolean(), nullable=True))
    op.create_unique_constraint(None, 'kubes', ['is_default'])
    ### end Alembic commands ###
    kube = session.query(Kube).filter(Kube.id >= 0).order_by(Kube.id).first()
    if kube is not None:
        kube.is_default = True
        session.commit()
def upgrade():
    session = Session(bind=op.get_bind())
    op.drop_column('system_settings', 'created')
    op.drop_column('system_settings', 'deleted')
    op.add_column('system_settings', sa.Column('label', sa.Text, nullable=True))
    op.add_column('system_settings', sa.Column('description', sa.Text, nullable=True))
    op.add_column('system_settings', sa.Column('placeholder', sa.String, nullable=True))
    billing_link = session.query(SystemSettings).filter_by(
        name='billing_apps_link').order_by(SystemSettings.id.desc()).first()
    if billing_link is not None:
        last = billing_link.id
        session.query(SystemSettings).filter(SystemSettings.id != last).delete()
        billing_link.label = 'Link to billing system script'
        billing_link.description = 'Link to predefined application request processing script'
        billing_link.placeholder = 'http://whmcs.com/script.php'
    else:
        bl = SystemSettings(name='billing_apps_link',
                            label='Link to billing system script',
                            description='Link to predefined application request processing script',
                            placeholder='http://whmcs.com/script.php')
        session.add(bl)
    pd = SystemSettings(name='persitent_disk_max_size', value='10',
                        label='Persistent disk maximum size',
                        description='maximum capacity of a user container persistent disk in GB',
                        placeholder='Enter value to limit PD size')
    session.add(pd)
    ms = SystemSettings(name='default_smtp_server',
                        label='Default SMTP server',
                        description='Default SMTP server',
                        placeholder='Default SMTP server')
    session.add(ms)
    session.commit()
    op.create_unique_constraint('uq_system_settings_name', 'system_settings', ['name'])
def upgrade():
    bind = op.get_bind()
    op.add_column('packages', sa.Column('is_default', sa.Boolean(), nullable=True))
    op.create_unique_constraint(None, 'packages', ['is_default'])
    bind.execute("UPDATE packages SET is_default=true WHERE id in (SELECT MIN(id) FROM packages)")
def upgrade():
    conn = op.get_bind()
    op.drop_column('predefined_apps', 'user_id')
    op.add_column('pods', sa.Column(
        'template_plan_name', sa.String(24), nullable=True))
    op.create_unique_constraint('resource_role_name_unique', 'rbac_permission',
                                ['resource_id', 'role_id', 'name'])
    op.add_column(
        'system_settings', sa.Column('setting_group', sa.Text, default=''))
def downgrade():
    op.alter_column('package_kube', 'package_id',
                    existing_type=sa.INTEGER(), nullable=True)
    op.alter_column('package_kube', 'kube_id',
                    existing_type=sa.INTEGER(), nullable=True)
    op.alter_column('packages', 'suffix',
                    existing_type=sa.String(length=16), nullable=True)
    op.alter_column('packages', 'prefix',
                    existing_type=sa.String(length=16), nullable=True)
    op.alter_column('packages', 'name',
                    existing_type=sa.VARCHAR(length=64), nullable=True)
    op.create_unique_constraint(u'kubes_is_default_key', 'kubes', ['is_default'])
    op.drop_index('one_default', table_name='kubes')
    op.alter_column('kubes', 'name',
                    existing_type=sa.VARCHAR(length=64), nullable=True)
def downgrade():
    # ### commands auto generated by Alembic - please adjust! ###
    op.create_unique_constraint('User_email_key', 'User', ['email'])
    op.drop_index(op.f('ix_User_username'), table_name='User')
    op.alter_column('User', 'email',
                    existing_type=sa.VARCHAR(),
                    nullable=True)
    op.drop_column('User', 'username')
    # ### end Alembic commands ###
def downgrade():
    # ### commands auto generated by Alembic - please adjust! ###
    op.add_column('event', sa.Column('token', sa.VARCHAR(), autoincrement=False, nullable=True))
    op.create_unique_constraint('event_token_key', 'event', ['token'])
    # ### end Alembic commands ###
def upgrade():
    # ### commands auto generated by Alembic - please adjust! ###
    op.create_unique_constraint('uq_dataset_problem_idtable_name_entity_id',
                                'dataset', ['problem_id', 'table_name', 'entity_id'])
    op.drop_constraint('uq_dataset_table_name_entity_id', 'dataset', type_='unique')
    # ### end Alembic commands ###
def downgrade():
    # ### commands auto generated by Alembic - please adjust! ###
    op.create_unique_constraint('uq_dataset_table_name_entity_id',
                                'dataset', ['table_name', 'entity_id'])
    op.drop_constraint('uq_dataset_problem_idtable_name_entity_id', 'dataset', type_='unique')
    # ### end Alembic commands ###
def upgrade():
    op.create_unique_constraint('uq_dataset_label_probability_data_id_label_id',
                                'dataset_label_probability',
                                ['data_id', 'label_id'])
def upgrade():
    # ### commands auto generated by Alembic - please adjust! ###
    op.create_unique_constraint(None, 'law_group', ['name'])
    op.create_unique_constraint(None, 'law_status', ['name'])
    # ### end Alembic commands ###
def upgrade():
    # ### commands auto generated by Alembic - please adjust! ###
    op.drop_table('email_mailing_list')
    op.drop_table('email')
    op.add_column('mailing_list', sa.Column('url', sa.String(length=50), nullable=True))
    op.create_unique_constraint(None, 'mailing_list', ['url'])
    # ### end Alembic commands ###
def upgrade():
    # ### commands auto generated by Alembic - please adjust! ###
    op.create_unique_constraint('_cluster_name_uc', 'user', ['cluster_id', 'name'])
    # ### end Alembic commands ###
def upgrade():
    # ### commands auto generated by Alembic - please adjust! ###
    op.create_unique_constraint('_node_mountpoint_what_uc', 'mountpoint', ['node_id', 'what'])
    op.create_unique_constraint('_node_mountpoint_where_uc', 'mountpoint', ['node_id', 'where'])
    # ### end Alembic commands ###
def upgrade():
    ### commands auto generated by Alembic - please adjust! ###
    op.add_column(
        'reference_set',
        sa.Column('next_tiledb_column_offset', sa.BigInteger(), nullable=False, default=0))
    op.add_column(
        'reference',
        sa.Column('tiledb_column_offset', sa.BigInteger(), nullable=True))
    op.alter_column('reference', 'length',
                    existing_type=sa.BIGINT(), nullable=False)
    op.alter_column('reference', 'name',
                    existing_type=sa.TEXT(), nullable=False)
    op.create_unique_constraint('unique_name_per_reference_set_constraint',
                                'reference', ['reference_set_id', 'name'])
    op.create_index('unique_reference_set_id_offset_idx', 'reference',
                    ['reference_set_id', 'tiledb_column_offset'], unique=True)
    op.drop_column('reference', 'offset')
    # Trigger on reference insertion
    op.execute('''\
        CREATE OR REPLACE FUNCTION increment_next_column_in_reference_set_pgsql()
        RETURNS trigger
        AS $increment_next_column_in_reference_set_pgsql$
        BEGIN
            UPDATE reference
                SET tiledb_column_offset=(select next_tiledb_column_offset
                                          from reference_set
                                          where id=NEW.reference_set_id)
                where NEW.tiledb_column_offset IS NULL and id=NEW.id;
            UPDATE reference_set
                SET next_tiledb_column_offset=next_tiledb_column_offset+NEW.length
                WHERE id = NEW.reference_set_id;
            RETURN NEW;
        END;
        $increment_next_column_in_reference_set_pgsql$ LANGUAGE plpgsql;
        CREATE TRIGGER increment_next_column_in_reference_set
            AFTER INSERT ON reference
            FOR EACH ROW
            EXECUTE PROCEDURE increment_next_column_in_reference_set_pgsql();
        ''')
    ### end Alembic commands ###
def create_unique_constraint(self, name, local_cols, **kw):
    """Issue a "create unique constraint" instruction using the
    current batch migration context.

    The batch form of this call omits the ``source`` and ``schema``
    arguments from the call.

    .. seealso::

        :meth:`.Operations.create_unique_constraint`

    """
    kw['schema'] = self.impl.schema
    return super(BatchOperations, self).create_unique_constraint(
        name, self.impl.table_name, local_cols, **kw)
def downgrade():
    # ### commands auto generated by Alembic - please adjust! ###
    op.create_unique_constraint('sendmessage_to_key', 'sendmessage', ['to'])
    op.create_unique_constraint('sendmessage_state_key', 'sendmessage', ['state'])
    op.create_unique_constraint('sendmessage_message_key', 'sendmessage', ['message'])
    # ### end Alembic commands ###
def upgrade():
    op.create_unique_constraint(
        op.f('uq_txn_reconciles_ofx_account_id'),
        'txn_reconciles',
        ['ofx_account_id', 'ofx_fitid']
    )
    op.create_unique_constraint(
        op.f('uq_txn_reconciles_txn_id'),
        'txn_reconciles',
        ['txn_id']
    )
def upgrade(): """Upgrade the database to a newer revision.""" # commands auto generated by Alembic - please adjust! ### op.add_column('analysis_requests', sa.Column('parent_id', sa.Integer(), nullable=True)) op.add_column('worker_results', sa.Column('worker_id', sa.String(length=64), nullable=True)) op.create_unique_constraint(None, 'worker_results', ['worker_id']) # end Alembic commands ###
def upgrade(): """Upgrade the database to a newer revision.""" # commands auto generated by Alembic - please adjust! ### op.create_table('stacks', sa.Column('id', sa.Integer(), nullable=False), sa.Column('is_ref_stack', sa.Boolean(), nullable=False), sa.Column('stack_json', postgresql.JSONB(), nullable=False), sa.PrimaryKeyConstraint('id')) op.add_column('similar_stacks', sa.Column('analysis', postgresql.JSONB())) op.add_column('similar_stacks', sa.Column('similar_stack_id', sa.Integer(), nullable=False)) op.add_column('similar_stacks', sa.Column('similarity_value', sa.Float(), nullable=False)) op.add_column('similar_stacks', sa.Column('stack_id', sa.Integer(), nullable=False)) op.create_unique_constraint('sim_unique', 'similar_stacks', ['stack_id', 'similar_stack_id']) op.drop_constraint('similar_stacks_appstack_id_fkey', 'similar_stacks', type_='foreignkey') op.create_foreign_key(None, 'similar_stacks', 'stacks', ['stack_id'], ['id']) op.create_foreign_key(None, 'similar_stacks', 'stacks', ['similar_stack_id'], ['id']) op.drop_column('similar_stacks', 'dependency_list') op.drop_column('similar_stacks', 'appstack_id') op.drop_table('reference_stacks') op.drop_table('app_stacks') # end Alembic commands ###
def upgrade(): """Upgrade the database to a newer revision.""" # commands auto generated by Alembic - please adjust! ### op.create_unique_constraint(None, 'ecosystems', ['name']) # end Alembic commands ###
def upgrade():
    op.rename_table('knowledge_post_author', 'assoc_post_author')
    op.rename_table('knowledge_post_tags', 'assoc_post_tag')
    op.add_column('assoc_post_author', sa.Column('order', sa.Integer(), nullable=True))
    op.add_column('posts', sa.Column('uuid', sa.String(length=100), nullable=True))
    op.create_unique_constraint(None, 'posts', ['uuid'])
    op.add_column('pageviews', sa.Column('object_action', sa.String(length=100), nullable=True))
    op.add_column('pageviews', sa.Column('version', sa.String(length=100), nullable=True))
def upgrade():
    # ### commands auto generated by Alembic - please adjust! ###
    op.add_column('repository', sa.Column('external_id', sa.String(length=64), nullable=True))
    op.add_column('repository', sa.Column('provider', zeus.db.types.enum.StrEnum(), nullable=False))
    op.create_unique_constraint('unq_external_id', 'repository', ['provider', 'external_id'])
    # ### end Alembic commands ###
def upgrade():
    # ### commands auto generated by Alembic - please adjust! ###
    op.drop_constraint('unq_repo_name', 'repository', type_='unique')
    op.create_unique_constraint('unq_repo_name', 'repository', ['provider', 'owner_name', 'name'])
    # ### end Alembic commands ###
def downgrade():
    # ### commands auto generated by Alembic - please adjust! ###
    op.drop_constraint('unq_repo_name', 'repository', type_='unique')
    op.create_unique_constraint('unq_repo_name', 'repository', ['owner_name', 'name'])
    # ### end Alembic commands ###
def downgrade():
    # ### commands auto generated by Alembic - please adjust! ###
    op.add_column('filecoverage', sa.Column(
        'job_id', postgresql.UUID(), autoincrement=False, nullable=False))
    op.drop_constraint(None, 'filecoverage', type_='foreignkey')
    op.create_foreign_key('filecoverage_job_id_fkey', 'filecoverage', 'job',
                          ['job_id'], ['id'], ondelete='CASCADE')
    op.create_unique_constraint(
        'unq_job_filname', 'filecoverage', ['job_id', 'filename'])
    op.drop_constraint('unq_coverage_filname', 'filecoverage', type_='unique')
    op.drop_index(op.f('ix_filecoverage_build_id'), table_name='filecoverage')
    op.drop_column('filecoverage', 'build_id')
    # ### end Alembic commands ###
def downgrade():
    # ### commands auto generated by Alembic - please adjust! ###
    op.create_unique_constraint('unq_external_id', 'repository', ['provider', 'external_id'])
    op.create_unique_constraint('repository_url_key', 'repository', ['url'])
    op.create_unique_constraint('repository_name_key', 'repository', ['name'])
    op.drop_constraint('unq_repo_name', 'repository', type_='unique')
    op.drop_constraint('unq_repo_external_id', 'repository', type_='unique')
    op.drop_column('repository', 'owner_name')
    # ### end Alembic commands ###