The following 25 code examples, extracted from open-source Python projects, illustrate how to use alembic.op.f().
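Before the collected examples, here is a minimal sketch of the typical pattern (the table, column, and constraint names are hypothetical, and the usual revision identifiers of a migration module are omitted): op.f() marks a constraint or index name as having already had the project's naming convention applied, so Alembic and SQLAlchemy use it verbatim instead of rewriting it.

    # Hypothetical migration sketch; assumes a naming_convention is configured
    # on the project's MetaData (e.g. pk_%(table_name)s, uq_%(table_name)s_%(column_0_name)s).
    import sqlalchemy as sa
    from alembic import op

    def upgrade():
        op.create_table(
            'widgets',
            sa.Column('id', sa.Integer(), nullable=False),
            sa.Column('name', sa.String(length=64), nullable=False),
            # op.f() says: this name is already final, do not re-apply the convention.
            sa.PrimaryKeyConstraint('id', name=op.f('pk_widgets')),
            sa.UniqueConstraint('name', name=op.f('uq_widgets_name')),
        )
        op.create_index(op.f('ix_widgets_name'), 'widgets', ['name'], unique=False)

    def downgrade():
        op.drop_index(op.f('ix_widgets_name'), table_name='widgets')
        op.drop_table('widgets')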
def upgrade():
    op.create_table('mastodon_instances',
        sa.Column('instance', sa.String(), nullable=False),
        sa.Column('popularity', sa.Float(), server_default='10', nullable=False),
        sa.PrimaryKeyConstraint('instance', name=op.f('pk_mastodon_instances'))
    )
    op.execute("""
        INSERT INTO mastodon_instances (instance, popularity) VALUES
            ('mastodon.social', 100),
            ('mastodon.cloud', 90),
            ('social.tchncs.de', 80),
            ('mastodon.xyz', 70),
            ('mstdn.io', 60),
            ('awoo.space', 50),
            ('cybre.space', 40),
            ('mastodon.art', 30)
        ;
    """)
def upgrade():
    # ### commands auto generated by Alembic - please adjust! ###
    op.create_table(
        'failurereason',
        sa.Column('job_id', zeus.db.types.guid.GUID(), nullable=False),
        sa.Column('reason', zeus.db.types.enum.StrEnum(), nullable=False),
        sa.Column('repository_id', zeus.db.types.guid.GUID(), nullable=False),
        sa.Column('id', zeus.db.types.guid.GUID(), nullable=False),
        sa.Column(
            'date_created',
            sa.TIMESTAMP(timezone=True),
            server_default=sa.text('now()'),
            nullable=False
        ),
        sa.ForeignKeyConstraint(['job_id'], ['job.id'], ondelete='CASCADE'),
        sa.ForeignKeyConstraint(['repository_id'], ['repository.id'], ondelete='CASCADE'),
        sa.PrimaryKeyConstraint('id'),
        sa.UniqueConstraint('job_id', 'reason', name='unq_failurereason_key')
    )
    op.create_index(
        op.f('ix_failurereason_repository_id'), 'failurereason', ['repository_id'], unique=False
    )
    # ### end Alembic commands ###
def downgrade():
    op.drop_table('network_interfaces')
    op.drop_index(op.f('ix_devices_labels'), table_name='labels')
    op.drop_table('labels')
    op.drop_table('network_devices')
    op.drop_table('hosts')
    op.drop_index(op.f('ix_networks_region_id'), table_name='networks')
    op.drop_index(op.f('ix_networks_cloud_id'), table_name='networks')
    op.drop_index(op.f('ix_networks_project_id'), table_name='networks')
    op.drop_index(op.f('ix_networks_cell_id'), table_name='networks')
    op.drop_table('networks')
    op.drop_index(op.f('ix_devices_region_id'), table_name='devices')
    op.drop_index(op.f('ix_devices_cloud_id'), table_name='devices')
    op.drop_index(op.f('ix_devices_project_id'), table_name='devices')
    op.drop_index(op.f('ix_devices_cell_id'), table_name='devices')
    op.drop_table('devices')
    op.drop_index(op.f('ix_cells_region_id'), table_name='cells')
    op.drop_index(op.f('ix_cells_cloud_id'), table_name='cells')
    op.drop_index(op.f('ix_cells_project_id'), table_name='cells')
    op.drop_table('cells')
    op.drop_index(op.f('ix_users_project_id'), table_name='users')
    op.drop_index(op.f('ix_regions_project_id'), table_name='regions')
    op.drop_index(op.f('ix_regions_cloud_id'), table_name='regions')
    op.drop_table('regions')
    op.drop_index(op.f('ix_clouds_project_id'), table_name='clouds')
    op.drop_table('clouds')
    op.drop_table('users')
    op.drop_table('projects')
    op.drop_index(op.f('ix_variable_keys'), table_name='variables')
    op.drop_table('variables')
    op.drop_table('variable_association')
def downgrade():
    # ### commands auto generated by Alembic - please adjust! ###
    op.drop_table('predictions', schema='results')
    op.drop_table('feature_importances', schema='results')
    op.drop_table('evaluations', schema='results')
    op.drop_index(op.f('ix_results_models_model_hash'), table_name='models', schema='results')
    op.drop_table('models', schema='results')
    op.drop_table('model_groups', schema='results')
    op.drop_table('experiments', schema='results')
    op.execute('DROP SCHEMA results')
    # ### end Alembic commands ###
def downgrade():
    op.drop_table('tags_vulnerabilities')
    op.drop_table('reports')
    op.drop_table('vulnerabilities')
    op.drop_table('samples')
    op.drop_table('fqdns_typosquats')
    op.drop_table('ah_startup_config_params')
    op.drop_table('ah_runtime_config_params')
    op.drop_table('users')
    op.drop_table('ip_ranges')
    op.drop_table('fqdns')
    op.drop_table('emails_organizations')
    op.drop_table('contacts')
    op.drop_table('contactemails_organizations')
    op.drop_table('asn')
    op.drop_table('ah_startup_configs')
    op.drop_table('ah_runtime_configs')
    op.drop_index(op.f('ix_organizations_abbreviation'), table_name='organizations')
    op.drop_table('organizations')
    op.drop_table('deliverable_files')
    op.drop_table('ah_bots')
    op.drop_table('tasks_taskmeta')
    op.drop_table('tasks_groupmeta')
    op.drop_table('tags')
    op.drop_index(op.f('ix_roles_default'), table_name='roles')
    op.drop_table('roles')
    op.drop_table('report_types')
    op.drop_table('organization_groups')
    op.drop_table('emails')
    op.drop_table('deliverables')
    op.drop_table('ah_bot_types')
def upgrade():
    op.create_table('mastodon_app',
        sa.Column('created_at', sa.DateTime(), server_default=sa.text('now()'), nullable=False),
        sa.Column('updated_at', sa.DateTime(), server_default=sa.text('now()'), nullable=False),
        sa.Column('instance', sa.String(), nullable=False),
        sa.Column('client_id', sa.String(), nullable=False),
        sa.Column('client_secret', sa.String(), nullable=False),
        sa.Column('protocol', sa.Enum('http', 'https', name='enum_protocol'), nullable=False),
        sa.PrimaryKeyConstraint('instance', name=op.f('pk_mastodon_app'))
    )
def upgrade(): """ Upgrade to add groups. :return: """ op.create_table('group', sa.Column('group_name', sa.String(length=128), nullable=False), sa.Column('device_list', sa.String(length=1024), nullable=False), sa.PrimaryKeyConstraint('group_name', name=op.f('group_pkey')) ) op.execute(textwrap.dedent(""" CREATE OR REPLACE FUNCTION public.upsert_group(p_group_name character varying, p_device_list character varying) RETURNS integer AS $BODY$ DECLARE num_rows integer; BEGIN INSERT INTO public.group AS gro (group_name, device_list) VALUES (p_group_name, p_device_list) ON CONFLICT (group_name) DO UPDATE SET device_list = p_device_list WHERE gro.group_name = p_group_name; GET DIAGNOSTICS num_rows = ROW_COUNT; RETURN num_rows; END; $BODY$ LANGUAGE plpgsql VOLATILE COST 100;"""))
def _foreign_key_constraint(self, name, source, referent,
                            local_cols, remote_cols,
                            onupdate=None, ondelete=None,
                            deferrable=None, source_schema=None,
                            referent_schema=None, initially=None,
                            match=None, **dialect_kw):
    m = self._metadata()
    if source == referent:
        t1_cols = local_cols + remote_cols
    else:
        t1_cols = local_cols
        sa_schema.Table(
            referent, m,
            *[sa_schema.Column(n, NULLTYPE) for n in remote_cols],
            schema=referent_schema)

    t1 = sa_schema.Table(
        source, m,
        *[sa_schema.Column(n, NULLTYPE) for n in t1_cols],
        schema=source_schema)

    tname = "%s.%s" % (referent_schema, referent) if referent_schema \
        else referent

    if util.sqla_08:
        # "match" kw unsupported in 0.7
        dialect_kw['match'] = match

    f = sa_schema.ForeignKeyConstraint(local_cols,
                                       ["%s.%s" % (tname, n)
                                        for n in remote_cols],
                                       name=name,
                                       onupdate=onupdate,
                                       ondelete=ondelete,
                                       deferrable=deferrable,
                                       initially=initially,
                                       **dialect_kw)
    t1.append_constraint(f)

    return f
def _table(self, name, *columns, **kw):
    m = self._metadata()
    t = sa_schema.Table(name, m, *columns, **kw)
    for f in t.foreign_keys:
        self._ensure_table_for_fk(m, f)
    return t
def downgrade():
    op.drop_constraint(
        op.f('fk_transactions_transfer_id_transactions'),
        'transactions',
        type_='foreignkey'
    )
    op.drop_column('transactions', 'transfer_id')
def upgrade():
    op.create_unique_constraint(
        op.f('uq_txn_reconciles_ofx_account_id'),
        'txn_reconciles',
        ['ofx_account_id', 'ofx_fitid']
    )
    op.create_unique_constraint(
        op.f('uq_txn_reconciles_txn_id'), 'txn_reconciles', ['txn_id']
    )
def downgrade():
    op.drop_constraint(
        op.f('uq_txn_reconciles_txn_id'), 'txn_reconciles', type_='unique'
    )
    op.drop_constraint(
        op.f('uq_txn_reconciles_ofx_account_id'), 'txn_reconciles', type_='unique'
    )
def upgrade():
    op.create_table(
        'projects',
        sa.Column('id', sa.Integer(), nullable=False),
        sa.Column('name', sa.String(length=40), nullable=True),
        sa.Column('notes', sa.String(length=254), nullable=True),
        sa.Column('is_active', sa.Boolean(), nullable=True),
        sa.PrimaryKeyConstraint('id', name=op.f('pk_projects')),
        mysql_engine='InnoDB'
    )
    op.create_table(
        'bom_items',
        sa.Column('id', sa.Integer(), nullable=False),
        sa.Column('project_id', sa.Integer(), nullable=False),
        sa.Column('name', sa.String(length=254), nullable=True),
        sa.Column('notes', sa.String(length=254), nullable=True),
        sa.Column('quantity', sa.Integer(), nullable=True),
        sa.Column('unit_cost', sa.Numeric(precision=10, scale=4), nullable=True),
        sa.Column('url', sa.String(length=254), nullable=True),
        sa.Column('is_active', sa.Boolean(), nullable=True),
        sa.ForeignKeyConstraint(
            ['project_id'], ['projects.id'],
            name=op.f('fk_bom_items_project_id_projects')
        ),
        sa.PrimaryKeyConstraint('id', name=op.f('pk_bom_items')),
        mysql_engine='InnoDB'
    )
def downgrade():
    op.execute('DROP FUNCTION IF EXISTS next_item_value(uuid)')
    # ### commands auto generated by Alembic - please adjust! ###
    op.drop_index(op.f('ix_artifact_repository_id'), table_name='artifact')
    op.drop_table('artifact')
    op.drop_index(op.f('ix_testcase_repository_id'), table_name='testcase')
    op.drop_table('testcase')
    op.drop_index(op.f('ix_filecoverage_repository_id'), table_name='filecoverage')
    op.drop_table('filecoverage')
    op.drop_index(op.f('ix_job_repository_id'), table_name='job')
    op.drop_index(op.f('ix_job_build_id'), table_name='job')
    op.drop_table('job')
    op.drop_index(op.f('ix_build_source_id'), table_name='build')
    op.drop_index(op.f('ix_build_repository_id'), table_name='build')
    op.drop_index(op.f('ix_build_author_id'), table_name='build')
    op.drop_table('build')
    op.drop_index(op.f('ix_source_repository_id'), table_name='source')
    op.drop_index('idx_source_repo_sha', table_name='source')
    op.drop_table('source')
    op.drop_index(op.f('ix_patch_repository_id'), table_name='patch')
    op.drop_index('idx_repo_sha', table_name='patch')
    op.drop_table('patch')
    op.drop_index(op.f('ix_revision_repository_id'), table_name='revision')
    op.drop_index(op.f('ix_revision_committer_id'), table_name='revision')
    op.drop_index(op.f('ix_revision_author_id'), table_name='revision')
    op.drop_table('revision')
    op.drop_table('repository_access')
    op.drop_index(op.f('ix_identity_user_id'), table_name='identity')
    op.drop_table('identity')
    op.drop_index(op.f('ix_hook_repository_id'), table_name='hook')
    op.drop_table('hook')
    op.drop_index(op.f('ix_author_repository_id'), table_name='author')
    op.drop_table('author')
    op.drop_table('api_token_repository_access')
    op.drop_table('user')
    op.drop_table('repository')
    op.drop_table('itemstat')
    op.drop_table('itemsequence')
    op.drop_table('itemoption')
    op.drop_table('api_token')
    # ### end Alembic commands ###
def upgrade():
    # ### commands auto generated by Alembic - please adjust! ###
    op.create_table('styleviolation',
        sa.Column('job_id', zeus.db.types.guid.GUID(), nullable=False),
        sa.Column('filename', sa.Text(), nullable=False),
        sa.Column('severity', zeus.db.types.enum.Enum(), nullable=False),
        sa.Column('message', sa.Text(), nullable=False),
        sa.Column('lineno', sa.Integer(), nullable=True),
        sa.Column('colno', sa.Integer(), nullable=True),
        sa.Column('source', sa.Text(), nullable=True),
        sa.Column('repository_id', zeus.db.types.guid.GUID(), nullable=False),
        sa.Column('id', zeus.db.types.guid.GUID(), nullable=False),
        sa.Column('date_created', sa.TIMESTAMP(timezone=True),
                  server_default=sa.text('now()'), nullable=False),
        sa.ForeignKeyConstraint(['job_id'], ['job.id'], ondelete='CASCADE'),
        sa.ForeignKeyConstraint(['repository_id'], ['repository.id'], ondelete='CASCADE'),
        sa.PrimaryKeyConstraint('id')
    )
    op.create_index(
        op.f('ix_styleviolation_repository_id'), 'styleviolation',
        ['repository_id'], unique=False)
    # ### end Alembic commands ###
def downgrade():
    # ### commands auto generated by Alembic - please adjust! ###
    op.drop_index(op.f('ix_styleviolation_repository_id'), table_name='styleviolation')
    op.drop_table('styleviolation')
    # ### end Alembic commands ###
def downgrade():
    # ### commands auto generated by Alembic - please adjust! ###
    op.drop_index(op.f('ix_failurereason_repository_id'), table_name='failurereason')
    op.drop_table('failurereason')
    # ### end Alembic commands ###
def downgrade():
    # ### commands auto generated by Alembic - please adjust! ###
    op.drop_table('track_tags')
    op.drop_index(op.f('ix_tracks_title'), table_name='tracks')
    op.drop_index(op.f('ix_tracks_path'), table_name='tracks')
    op.drop_index(op.f('ix_tracks_lib_id'), table_name='tracks')
    op.drop_index(op.f('ix_tracks_artist_id'), table_name='tracks')
    op.drop_index(op.f('ix_tracks_album_id'), table_name='tracks')
    op.drop_table('tracks')
    op.drop_table('album_tags')
    op.drop_table('album_images')
    op.drop_table('artist_tags')
    op.drop_table('artist_images')
    op.drop_index(op.f('ix_albums_title'), table_name='albums')
    op.drop_index(op.f('ix_albums_lib_id'), table_name='albums')
    op.drop_index(op.f('ix_albums_artist_id'), table_name='albums')
    op.drop_table('albums')
    op.drop_index(op.f('ix_tags_lib_id'), table_name='tags')
    op.drop_table('tags')
    op.drop_index(op.f('ix_artists_name'), table_name='artists')
    op.drop_index(op.f('ix_artists_lib_id'), table_name='artists')
    op.drop_table('artists')
    op.drop_table('meta')
    op.drop_table('libraries')
    op.drop_table('images')
    # ### end Alembic commands ###
def upgrade():
    op.create_table('accounts',
        sa.Column('created_at', sa.DateTime(), server_default=sa.text('now()'), nullable=False),
        sa.Column('updated_at', sa.DateTime(), server_default=sa.text('now()'), nullable=False),
        sa.Column('id', sa.String(), nullable=False),
        sa.Column('policy_enabled', sa.Boolean(), server_default='FALSE', nullable=False),
        sa.Column('policy_keep_latest', sa.Integer(), server_default='0', nullable=False),
        sa.Column('policy_keep_favourites', sa.Boolean(), server_default='TRUE', nullable=False),
        sa.Column('policy_delete_every', sa.Interval(), server_default='0', nullable=False),
        sa.Column('policy_keep_younger', sa.Interval(), server_default='0', nullable=False),
        sa.Column('display_name', sa.String(), nullable=True),
        sa.Column('screen_name', sa.String(), nullable=True),
        sa.Column('avatar_url', sa.String(), nullable=True),
        sa.Column('last_fetch', sa.DateTime(), server_default='epoch', nullable=True),
        sa.Column('last_delete', sa.DateTime(), server_default='epoch', nullable=True),
        sa.PrimaryKeyConstraint('id', name=op.f('pk_accounts'))
    )
    op.create_table('oauth_tokens',
        sa.Column('created_at', sa.DateTime(), server_default=sa.text('now()'), nullable=False),
        sa.Column('updated_at', sa.DateTime(), server_default=sa.text('now()'), nullable=False),
        sa.Column('token', sa.String(), nullable=False),
        sa.Column('token_secret', sa.String(), nullable=False),
        sa.Column('account_id', sa.String(), nullable=True),
        sa.ForeignKeyConstraint(['account_id'], ['accounts.id'],
                                name=op.f('fk_oauth_tokens_account_id_accounts'),
                                onupdate='CASCADE', ondelete='CASCADE'),
        sa.PrimaryKeyConstraint('token', name=op.f('pk_oauth_tokens'))
    )
    op.create_table('posts',
        sa.Column('created_at', sa.DateTime(), server_default=sa.text('now()'), nullable=False),
        sa.Column('updated_at', sa.DateTime(), server_default=sa.text('now()'), nullable=False),
        sa.Column('id', sa.String(), nullable=False),
        sa.Column('body', sa.String(), nullable=True),
        sa.Column('author_id', sa.String(), nullable=False),
        sa.Column('favourite', sa.Boolean(), server_default='FALSE', nullable=False),
        sa.ForeignKeyConstraint(['author_id'], ['accounts.id'],
                                name=op.f('fk_posts_author_id_accounts'),
                                onupdate='CASCADE', ondelete='CASCADE'),
        sa.PrimaryKeyConstraint('id', name=op.f('pk_posts'))
    )
    op.create_table('sessions',
        sa.Column('created_at', sa.DateTime(), server_default=sa.text('now()'), nullable=False),
        sa.Column('updated_at', sa.DateTime(), server_default=sa.text('now()'), nullable=False),
        sa.Column('id', sa.String(), nullable=False),
        sa.Column('account_id', sa.String(), nullable=False),
        sa.ForeignKeyConstraint(['account_id'], ['accounts.id'],
                                name=op.f('fk_sessions_account_id_accounts'),
                                onupdate='CASCADE', ondelete='CASCADE'),
        sa.PrimaryKeyConstraint('id', name=op.f('pk_sessions'))
    )
    op.create_table('twitter_archives',
        sa.Column('created_at', sa.DateTime(), server_default=sa.text('now()'), nullable=False),
        sa.Column('updated_at', sa.DateTime(), server_default=sa.text('now()'), nullable=False),
        sa.Column('id', sa.Integer(), nullable=False),
        sa.Column('account_id', sa.String(), nullable=False),
        sa.Column('body', sa.LargeBinary(), nullable=False),
        sa.Column('chunks', sa.Integer(), nullable=True),
        sa.Column('chunks_successful', sa.Integer(), server_default='0', nullable=False),
        sa.Column('chunks_failed', sa.Integer(), server_default='0', nullable=False),
        sa.ForeignKeyConstraint(['account_id'], ['accounts.id'],
                                name=op.f('fk_twitter_archives_account_id_accounts'),
                                onupdate='CASCADE', ondelete='CASCADE'),
        sa.PrimaryKeyConstraint('id', name=op.f('pk_twitter_archives'))
    )
def upgrade(): """Upgrade instructions.""" op.execute('CREATE EXTENSION IF NOT EXISTS "uuid-ossp"') op.create_table( 'feed', sa.Column('id', postgresql.UUID(), server_default=sa.text('uuid_generate_v4()'), nullable=False), sa.Column('name', sa.Unicode(), nullable=True), sa.Column('url', sa.Unicode(), nullable=False), sa.PrimaryKeyConstraint('id', name=op.f('pk_feed')), sa.UniqueConstraint('url', name=op.f('uq_feed_url')) ) op.create_table( 'user', sa.Column('id', postgresql.UUID(), server_default=sa.text('uuid_generate_v4()'), nullable=False), sa.Column('email', sa.Unicode(), nullable=False), sa.Column('password', sa.Unicode(), nullable=False), sa.Column('name', sa.Unicode(), nullable=False), sa.PrimaryKeyConstraint('id', name=op.f('pk_user')), sa.UniqueConstraint('email', name=op.f('uq_user_email')) ) op.create_table( 'article', sa.Column('id', postgresql.UUID(), server_default=sa.text('uuid_generate_v4()'), nullable=False), sa.Column('feed_id', postgresql.UUID(), nullable=False), sa.Column('title', sa.Unicode(), nullable=False), sa.Column('url', sa.Unicode(), nullable=False), sa.Column('html_text', sa.Unicode(), nullable=False), sa.Column('clean_text', sa.Unicode(), nullable=False), sa.Column('publication_date', postgresql.TIMESTAMP(timezone='UTC'), nullable=False), sa.ForeignKeyConstraint(['feed_id'], ['feed.id'], name=op.f('fk_article_feed_id_feed')), sa.PrimaryKeyConstraint('id', name=op.f('pk_article')) ) op.create_table( 'subscription', sa.Column('id', postgresql.UUID(), server_default=sa.text('uuid_generate_v4()'), nullable=False), sa.Column('user_id', postgresql.UUID(), nullable=False), sa.Column('feed_id', postgresql.UUID(), nullable=False), sa.ForeignKeyConstraint(['feed_id'], ['feed.id'], name=op.f('fk_subscription_feed_id_feed')), sa.ForeignKeyConstraint(['user_id'], ['user.id'], name=op.f('fk_subscription_user_id_user')), sa.PrimaryKeyConstraint('id', name=op.f('pk_subscription')) ) op.create_table( 'rating', sa.Column('id', postgresql.UUID(), server_default=sa.text('uuid_generate_v4()'), nullable=False), sa.Column('user_id', postgresql.UUID(), nullable=False), sa.Column('article_id', postgresql.UUID(), nullable=False), sa.Column('feed_id', postgresql.UUID(), nullable=False), sa.Column('user_rating', sa.DECIMAL(precision=4, scale=2), nullable=True), sa.Column('machine_rating', sa.DECIMAL(precision=4, scale=2), nullable=True), sa.Column('read', sa.Boolean(), server_default=sa.text('FALSE'), nullable=False), sa.ForeignKeyConstraint(['article_id'], ['article.id'], name=op.f('fk_rating_article_id_article')), sa.ForeignKeyConstraint(['feed_id'], ['feed.id'], name=op.f('fk_rating_feed_id_feed')), sa.ForeignKeyConstraint(['user_id'], ['user.id'], name=op.f('fk_rating_user_id_user')), sa.PrimaryKeyConstraint('id', name=op.f('pk_rating')) )
def upgrade():
    # ### commands auto generated by Alembic - please adjust! ###
    op.create_table('profile',
        sa.Column('profile_name', sa.String(length=128), nullable=False),
        sa.Column('properties', JSONB(), nullable=False),
        sa.PrimaryKeyConstraint('profile_name', name=op.f('profile_pkey'))
    )
    op.create_table('configuration',
        sa.Column('key', sa.String(length=128), nullable=False),
        sa.Column('value', sa.String(length=1024), nullable=False),
        sa.PrimaryKeyConstraint('key', name=op.f('configuration_pkey'))
    )
    op.create_table('device',
        sa.Column('device_id', sa.Integer(), nullable=False, autoincrement=True),
        sa.Column('device_type', sa.String(length=64), nullable=False),
        sa.Column('properties', JSONB(), nullable=True),
        sa.Column('hostname', sa.String(length=256), nullable=True),
        sa.Column('ip_address', sa.String(length=64), nullable=True),
        sa.Column('mac_address', sa.String(length=64), nullable=True),
        sa.Column('profile_name', sa.String(length=128), nullable=True),
        sa.Column('deleted', sa.BOOLEAN(), server_default=false_just_for_sqlalchemy(), nullable=False),
        sa.PrimaryKeyConstraint('device_id', name=op.f('device_pkey')),
        sa.ForeignKeyConstraint(['profile_name'], ['profile.profile_name'],
                                name='device_profile', match='SIMPLE',
                                ondelete='NO ACTION', onupdate='NO ACTION')
    )
    op.create_table('log',
        sa.Column('process', sa.String(length=128), nullable=True),
        sa.Column('timestamp', sa.DateTime(timezone=True), nullable=False, server_default=func.now()),
        sa.Column('level', sa.Integer(), nullable=False),
        sa.Column('device_id', sa.Integer(), nullable=True),
        sa.Column('message', sa.Text(), nullable=False),
        sa.ForeignKeyConstraint(['device_id'], ['device.device_id'],
                                name='log_process', match='SIMPLE',
                                ondelete='NO ACTION', onupdate='NO ACTION'),
        sa.CheckConstraint('level = ANY (ARRAY[0, 10, 15, 20, 30, 40, 50])',
                           name=op.f('valid_log_levels'))
    )
    creating_functions()
    # ### end Alembic commands ###
def downgrade():
    op.drop_table('quark_port_ip_address_associations')
    op.drop_table('quark_dns_nameservers')
    op.drop_index(op.f('ix_quark_ip_addresses_version'), table_name='quark_ip_addresses')
    op.drop_index(op.f('ix_quark_ip_addresses_deallocated_at'), table_name='quark_ip_addresses')
    op.drop_index(op.f('ix_quark_ip_addresses_address'), table_name='quark_ip_addresses')
    op.drop_table('quark_ip_addresses')
    op.drop_table('quark_port_security_group_associations')
    op.drop_table('quark_routes')
    op.drop_index(op.f('ix_quark_subnets_tenant_id'), table_name='quark_subnets')
    op.drop_index(op.f('ix_quark_subnets_segment_id'), table_name='quark_subnets')
    op.drop_table('quark_subnets')
    op.drop_index(op.f('ix_quark_ports_name'), table_name='quark_ports')
    op.drop_index(op.f('ix_quark_ports_device_id'), table_name='quark_ports')
    op.drop_index('idx_ports_3', table_name='quark_ports')
    op.drop_index('idx_ports_2', table_name='quark_ports')
    op.drop_index('idx_ports_1', table_name='quark_ports')
    op.drop_table('quark_ports')
    op.drop_index(op.f('ix_quark_networks_tenant_id'), table_name='quark_networks')
    op.drop_table('quark_networks')
    op.drop_table('quark_ip_policy_cidrs')
    op.drop_index(op.f('ix_quark_mac_addresses_deallocated_at'), table_name='quark_mac_addresses')
    op.drop_table('quark_mac_addresses')
    op.drop_table('quark_security_group_rule')
    op.drop_table('quark_tags')
    op.drop_index(op.f('ix_quark_security_groups_tenant_id'), table_name='quark_security_groups')
    op.drop_table('quark_security_groups')
    op.drop_table('quark_ip_policy')
    op.drop_table('quark_tag_associations')
    op.drop_table('quark_mac_address_ranges')
    op.drop_index(op.f('ix_quark_nvp_driver_lswitchport_port_id'), table_name='quark_nvp_driver_lswitchport')
    op.drop_table('quark_nvp_driver_lswitchport')
    op.drop_index(op.f('ix_quark_nvp_driver_security_profile_nvp_id'), table_name='quark_nvp_driver_security_profile')
    op.drop_table('quark_nvp_driver_security_profile')
    op.drop_index(op.f('ix_quark_nvp_driver_lswitch_nvp_id'), table_name='quark_nvp_driver_lswitch')
    op.drop_index(op.f('ix_quark_nvp_driver_lswitch_network_id'), table_name='quark_nvp_driver_lswitch')
    op.drop_table('quark_nvp_driver_lswitch')
    op.drop_index(op.f('ix_quotas_tenant_id'), table_name='quotas')
    op.drop_table('quotas')
    op.drop_table('quark_nvp_driver_qos')
def f(self, name):
    """Indicate a string name that has already had a naming convention
    applied to it.

    This feature combines with the SQLAlchemy ``naming_convention`` feature
    to disambiguate constraint names that have already had naming
    conventions applied to them, versus those that have not.  This is
    necessary in the case that the ``"%(constraint_name)s"`` token
    is used within a naming convention, so that it can be identified
    that this particular name should remain fixed.

    If :meth:`.Operations.f` is used on a constraint, the naming
    convention will not take effect::

        op.add_column('t', 'x', Boolean(name=op.f('ck_bool_t_x')))

    Above, the CHECK constraint generated will have the name
    ``ck_bool_t_x`` regardless of whether or not a naming convention is
    in use.

    Alternatively, if a naming convention is in use, and ``f`` is not used,
    names will be converted along conventions.  If the ``target_metadata``
    contains the naming convention
    ``{"ck": "ck_bool_%(table_name)s_%(constraint_name)s"}``, then the
    output of the following::

        op.add_column('t', 'x', Boolean(name='x'))

    will be::

        CONSTRAINT ck_bool_t_x CHECK (x in (1, 0))

    The function is rendered in the output of autogenerate when a
    particular constraint name is already converted, for SQLAlchemy
    version **0.9.4 and greater only**.  Even though ``naming_convention``
    was introduced in 0.9.2, the string disambiguation service is new
    as of 0.9.4.

    .. versionadded:: 0.6.4

    """
    if conv:
        return conv(name)
    else:
        raise NotImplementedError(
            "op.f() feature requires SQLAlchemy 0.9.4 or greater.")
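To complement the docstring above, here is a small illustrative sketch of the SQLAlchemy side of the mechanism: op.f() wraps the string in SQLAlchemy's conv() marker, which tells the naming_convention machinery that the name is already final. The table and constraint names are hypothetical, and the snippet assumes SQLAlchemy 0.9.4 or newer.

    # Sketch only: contrasts a plain constraint name (rewritten by the convention)
    # with a conv()-wrapped name (kept exactly as given). Names are illustrative.
    import sqlalchemy as sa
    from sqlalchemy.sql.elements import conv

    metadata = sa.MetaData(naming_convention={
        "ck": "ck_%(table_name)s_%(constraint_name)s",
    })

    t = sa.Table(
        't', metadata,
        # Plain name 'x': emitted as ck_t_x once the convention is applied.
        sa.Column('x', sa.Boolean(create_constraint=True, name='x')),
        # conv()-wrapped name: emitted as ck_t_y, untouched by the convention.
        sa.Column('y', sa.Boolean(create_constraint=True, name=conv('ck_t_y'))),
    )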
def upgrade():
    # ### commands auto generated by Alembic - please adjust! ###
    op.add_column(
        'transactions',
        sa.Column('transfer_id', sa.Integer(), nullable=True)
    )
    op.create_foreign_key(
        op.f('fk_transactions_transfer_id_transactions'),
        'transactions', 'transactions', ['transfer_id'], ['id']
    )
    bind = op.get_bind()
    session = Session(bind=bind)
    # begin data manipulation
    last_txn = None
    for txn in session.query(Transaction).filter(
        Transaction.description.like('Budget Transfer - %')
    ).order_by(Transaction.id.asc()).all():
        if last_txn is None:
            last_txn = txn
            continue
        if (
            txn.description == last_txn.description and
            txn.date == last_txn.date and
            txn.notes == last_txn.notes and
            txn.account_id == last_txn.account_id
        ):
            # txn and last_txn are a transfer
            last_txn.transfer_id = txn.id
            txn.transfer_id = last_txn.id
            session.add(txn)
            session.add(last_txn)
            logger.warning(
                'Inferred Transfer relationship between Transactions %d and %d',
                last_txn.id, txn.id
            )
            last_txn = None
            continue
        last_txn = txn
    session.commit()
    # ### end Alembic commands ###
def upgrade():
    # ### commands auto generated by Alembic - please adjust! ###
    op.create_table(
        'vehicles',
        sa.Column('id', sa.Integer(), nullable=False),
        sa.Column('name', sa.String(length=254), nullable=True),
        sa.Column('is_active', sa.Boolean(), nullable=True),
        sa.PrimaryKeyConstraint('id', name=op.f('pk_vehicles')),
        mysql_engine='InnoDB'
    )
    op.create_table(
        'fuellog',
        sa.Column('id', sa.Integer(), nullable=False),
        sa.Column('date', sa.Date(), nullable=True),
        sa.Column('vehicle_id', sa.Integer(), nullable=True),
        sa.Column('odometer_miles', sa.Integer(), nullable=True),
        sa.Column('reported_miles', sa.SmallInteger(), nullable=True),
        sa.Column('calculated_miles', sa.SmallInteger(), nullable=True),
        sa.Column('level_before', sa.SmallInteger(), nullable=True),
        sa.Column('level_after', sa.SmallInteger(), nullable=True),
        sa.Column('fill_location', sa.String(length=254), nullable=True),
        sa.Column('cost_per_gallon', sa.Numeric(precision=10, scale=4), nullable=True),
        sa.Column('total_cost', sa.Numeric(precision=10, scale=4), nullable=True),
        sa.Column('gallons', sa.Numeric(precision=10, scale=4), nullable=True),
        sa.Column('reported_mpg', sa.Numeric(precision=10, scale=4), nullable=True),
        sa.Column('calculated_mpg', sa.Numeric(precision=10, scale=4), nullable=True),
        sa.Column('notes', sa.String(length=254), nullable=True),
        sa.ForeignKeyConstraint(
            ['vehicle_id'], ['vehicles.id'],
            name=op.f('fk_fuellog_vehicle_id_vehicles')
        ),
        sa.PrimaryKeyConstraint('id', name=op.f('pk_fuellog')),
        mysql_engine='InnoDB'
    )
    # ### end Alembic commands ###