The following 41 code examples, extracted from open-source Python projects, illustrate how to use MySQLdb.IntegrityError().
def registerUser(self, uid, cid, uname):
    """Insert a user row under the connection lock.

    Returns 0 on success, 1 on a duplicate-key (IntegrityError),
    and 2 on any other database error.
    """
    insert_sql = "INSERT INTO `users` (uid, cid, uname) VALUES (%s,%s,%s)"
    self.lock.acquire()
    cursor = self.conn.cursor()
    status = 0
    try:
        cursor.execute(insert_sql, (uid, cid, uname,))
    except MySQLdb.IntegrityError:
        # Row already exists (duplicate key).
        status = 1
    except Exception as exc:
        print(exc, datetime.datetime.now())
        traceback.print_exc()
        status = 2
    # Commit unconditionally; a failed INSERT leaves nothing to persist.
    self.conn.commit()
    cursor.close()
    self.lock.release()
    return status
def addTopic(self, cid, topic):
    """Insert a (cid, topic) row, normalizing the topic first.

    Unknown topics are mapped to the closest known one via ``self.rdr``.
    Returns 0 on success, 1 on duplicate, 2 when no close topic exists,
    and 3 on any other database error.
    """
    if not self.rdr.validTopic(topic):
        topic = self.rdr.closest(topic)
        if topic == None:
            # No usable topic could be resolved.
            return 2
    insert_sql = "INSERT INTO `topics` (cid, topic) VALUES (%s, %s)"
    self.lock.acquire()
    cursor = self.conn.cursor()
    status = 0
    try:
        cursor.execute(insert_sql, (cid, topic,))
    except MySQLdb.IntegrityError:
        # Topic already registered for this cid.
        status = 1
    except Exception as exc:
        print(exc, datetime.datetime.now())
        traceback.print_exc()
        status = 3
    self.conn.commit()
    cursor.close()
    self.lock.release()
    return status
def insert_row(table, insert):
    """Execute an ``INSERT`` on a table.

    :param basestring table: table name
    :param dict insert: inserted data, with keys as the column name and
        values as the corresponding values
    :returns: the id of the inserted row
    :rtype: int or long
    :raises AlreadyExists: if the row already exists and insertion would
        result in duplicate keys
    """
    try:
        keys = _encode_name(insert.keys())
        values = ','.join([_encode(value) for value in insert.values()])
        return _execute("INSERT INTO %s (%s) VALUES (%s);"
                        % (_encode_name(table), keys, values),
                        write=True, ret='lastrowid')
    except MySQLdb.IntegrityError as e:
        # e[0] only works on Python 2; e.args[0] is portable.
        if e.args[0] == 1062:  # MySQL ER_DUP_ENTRY
            raise AlreadyExists
        raise
def process_item(self, item, spider):
    """Scrapy pipeline hook: persist a proxy item via SQLAlchemy merge.

    Items with an empty ``ip_port`` are logged and dropped (returns None,
    matching the original behavior).
    """
    if len(item['ip_port']):
        a = Proxy(
            ip_port=item['ip_port'],
            type=item['type'],
            level=item['level'],
            location=item['location'],
            speed=item['speed'],
            lifetime=item['lifetime'],
            lastcheck=item['lastcheck'],
            rule_id=item['rule_id'],
            source=item['source']
        )
        session = loadSession()
        try:
            session.merge(a)
            session.commit()
        # "except E, e" is Python-2-only syntax; "as e" works on both.
        except MySQLdb.IntegrityError as e:
            log.msg("MySQL Error: %s" % str(e), _level=logging.WARNING)
        return item
    else:
        log.msg("ip_port is invalid!", _level=logging.WARNING)
def process_item(self, item, spider):
    """Scrapy pipeline hook: insert one torrent item into MySQL.

    Duplicate rows (IntegrityError) are silently skipped. The connection
    is always closed, even when execute raises an unexpected error.
    """
    conn = MySQLdb.connect(host='localhost', user='root', passwd='wuchujie',
                           charset='utf8', db='cl1024')
    try:
        cur = conn.cursor()
        sql = ('insert into cl1024_torrent(cl_name, cl_url, cl_bankuai, posted, '
               'torrent_url, torrent_downloaded, torrent_download_urls) '
               'values(%s, %s, %s, %s, %s, %s, %s)')
        # Item field names, in the column order of the INSERT above.
        keys = ['cl_title', 'cl_url', 'cl_bankuai', 'posted',
                'torrent_url', 'torrent_downloaded', 'torrent_download_urls']
        values = [item.get(x) for x in keys]
        try:
            cur.execute(sql, values)
        except MySQLdb.IntegrityError:
            # Duplicate entry: skip, nothing to store.
            pass
        conn.commit()
        cur.close()
    finally:
        # Release the connection even if execute/commit raised.
        conn.close()
    return item
def select_group_ids(self, resource_name, timestamp):
    """Select the group ids from a snapshot table.

    Args:
        resource_name (str): String of the resource name.
        timestamp (str): String of timestamp, formatted as YYYYMMDDTHHMMSSZ.

    Returns:
        list: A list of group ids.

    Raises:
        MySQLError: When an error has occured while executing the query.
    """
    try:
        query = select_data.GROUP_IDS.format(timestamp)
        cursor = self.conn.cursor(cursorclass=cursors.DictCursor)
        cursor.execute(query)
        # DictCursor rows expose columns by name.
        return [record['group_id'] for record in cursor.fetchall()]
    except (DataError, IntegrityError, InternalError, NotSupportedError,
            OperationalError, ProgrammingError) as e:
        raise MySQLError(resource_name, e)
def execute_sql_with_fetch(self, resource_name, sql, values):
    """Executes a provided sql statement with fetch.

    Args:
        resource_name (str): String of the resource name.
        sql (str): String of the sql statement.
        values (tuple): Tuple of string for sql placeholder values.

    Returns:
        list: A list of dict representing rows of sql query result.

    Raises:
        MySQLError: When an error has occured while executing the query.
    """
    try:
        # DictCursor returns each row as a column-name -> value mapping.
        dict_cursor = self.conn.cursor(cursorclass=cursors.DictCursor)
        dict_cursor.execute(sql, values)
        return dict_cursor.fetchall()
    except (DataError, IntegrityError, InternalError, NotSupportedError,
            OperationalError, ProgrammingError) as e:
        raise MySQLError(resource_name, e)
def execute_sql_with_commit(self, resource_name, sql, values):
    """Executes a provided sql statement with commit.

    Args:
        resource_name (str): String of the resource name.
        sql (str): String of the sql statement.
        values (tuple): Tuple of string for sql placeholder values.

    Raises:
        MySQLError: When an error has occured while executing the query.
    """
    try:
        write_cursor = self.conn.cursor()
        write_cursor.execute(sql, values)
        # Persist the write; reads go through execute_sql_with_fetch instead.
        self.conn.commit()
    except (DataError, IntegrityError, InternalError, NotSupportedError,
            OperationalError, ProgrammingError) as e:
        raise MySQLError(resource_name, e)
def insert(self, data, hostnames):
    """Insert a job record plus its host, jobhost and process rows.

    Args:
        data: sequence of (resource_id, local_job_id, start_time_ts,
            end_time_ts, record) — record is stored COMPRESSed.
        hostnames: iterable of host names the job ran on.

    Duplicate jobs (MySQL error 1062) are silently skipped; commits are
    batched, flushing every 100 buffered inserts.
    """
    cur = self.con.cursor()
    try:
        query = "INSERT INTO job (resource_id, local_job_id, start_time_ts, end_time_ts, record) VALUES(%s,%s,%s,%s,COMPRESS(%s))"
        cur.execute(query, data)
        for host in hostnames:
            if host not in self._hostlistcache:
                # First sighting of this host: ensure a hosts row exists and
                # commit immediately so concurrent ingesters can see it.
                cur.execute("INSERT IGNORE INTO hosts (hostname) VALUES (%s)", [host])
                self.con.commit()
                self._hostlistcache[host] = 1
            cur.execute("INSERT INTO jobhosts (jobid, hostid) VALUES( (SELECT id FROM job WHERE resource_id = %s AND local_job_id = %s AND end_time_ts = %s), (SELECT id FROM hosts WHERE hostname = %s) )", [data[0], data[1], data[3], host])
        cur.execute("INSERT INTO process (jobid, ingest_version) VALUES ( (SELECT id FROM job WHERE resource_id = %s AND local_job_id = %s AND end_time_ts = %s), %s)", [data[0], data[1], data[3], INGEST_VERSION])
    except mdb.IntegrityError as e:
        # e[0] is Python-2-only exception indexing; e.args[0] is portable.
        # 1062 == ER_DUP_ENTRY: duplicate record, ignore.
        if e.args[0] != 1062:
            # Bare raise preserves the original traceback ("raise e" did not).
            raise
        # else: Todo - check that the blobs match on duplicate records
    self.buffered += 1
    if self.buffered > 100:
        self.con.commit()
        self.buffered = 0
def insert(self, resource_id, hostname, filename, start, end, jobid):
    """Insert or refresh an archive record for a host.

    Uses ON DUPLICATE KEY UPDATE to refresh the time range of an existing
    archive row. Duplicate-key errors other than 1062 are re-raised.
    Commits are batched, flushing every 100 buffered inserts.
    """
    cur = self.con.cursor()
    try:
        if hostname not in self._hostnamecache:
            # Ensure a hosts row exists before referencing it below.
            cur.execute("INSERT IGNORE INTO hosts (hostname) VALUES (%s)", [hostname])
            self.con.commit()
            self._hostnamecache[hostname] = 1
        query = """INSERT INTO archive (hostid, filename, start_time_ts, end_time_ts, jobid) VALUES( (SELECT id FROM hosts WHERE hostname = %s),%s,%s,%s,%s) ON DUPLICATE KEY UPDATE start_time_ts=%s, end_time_ts=%s"""
        cur.execute(query, [hostname, filename, start, end, jobid, start, end])
    except mdb.IntegrityError as e:
        # e[0] is Python-2-only exception indexing; e.args[0] is portable.
        if e.args[0] != 1062:  # 1062 == ER_DUP_ENTRY: ignore duplicates
            raise
        # else: Todo - check that the blobs match on duplicate records
    self.buffered += 1
    if self.buffered > 100:
        self.con.commit()
        self.buffered = 0
def execute(self, query, args=None):
    """Run ``query`` on the wrapped cursor, remapping selected
    OperationalErrors to Django's IntegrityError."""
    try:
        # args is None means no string interpolation
        return self.cursor.execute(query, args)
    except Database.OperationalError as e:
        # Map some error codes to IntegrityError, since they seem to be
        # misclassified and Django would prefer the more logical place.
        if e.args[0] in self.codes_for_integrityerror:
            six.reraise(utils.IntegrityError,
                        utils.IntegrityError(*tuple(e.args)),
                        sys.exc_info()[2])
        raise
def executemany(self, query, args):
    """Run ``query`` once per parameter set, remapping selected
    OperationalErrors to Django's IntegrityError."""
    try:
        return self.cursor.executemany(query, args)
    except Database.OperationalError as e:
        # Map some error codes to IntegrityError, since they seem to be
        # misclassified and Django would prefer the more logical place.
        if e.args[0] in self.codes_for_integrityerror:
            six.reraise(utils.IntegrityError,
                        utils.IntegrityError(*tuple(e.args)),
                        sys.exc_info()[2])
        raise
def check_constraints(self, table_names=None):
    """
    Checks each table name in `table_names` for rows with invalid foreign
    key references. This method is intended to be used in conjunction with
    `disable_constraint_checking()` and `enable_constraint_checking()`, to
    determine if rows with invalid references were entered while constraint
    checks were off.

    Raises an IntegrityError on the first invalid foreign key reference
    encountered (if any) and provides detailed information about the
    invalid reference in the error message.

    Backends can override this method if they can more directly apply
    constraint checking (e.g. via "SET CONSTRAINTS ALL IMMEDIATE")
    """
    cursor = self.cursor()
    if table_names is None:
        # Default to every table the introspection layer can see.
        table_names = self.introspection.table_names(cursor)
    for table_name in table_names:
        primary_key_column_name = self.introspection.get_primary_key_column(cursor, table_name)
        if not primary_key_column_name:
            # Tables without a detectable primary key column are skipped.
            continue
        key_columns = self.introspection.get_key_columns(cursor, table_name)
        for column_name, referenced_table_name, referenced_column_name in key_columns:
            # LEFT JOIN the referenced table: a NULL referenced column paired
            # with a non-NULL referring value means the FK target is missing.
            cursor.execute(""" SELECT REFERRING.`%s`, REFERRING.`%s` FROM `%s` as REFERRING LEFT JOIN `%s` as REFERRED ON (REFERRING.`%s` = REFERRED.`%s`) WHERE REFERRING.`%s` IS NOT NULL AND REFERRED.`%s` IS NULL""" % (primary_key_column_name, column_name, table_name, referenced_table_name, column_name, referenced_column_name, column_name, referenced_column_name))
            for bad_row in cursor.fetchall():
                # Fail fast on the first dangling reference found.
                raise utils.IntegrityError("The row in table '%s' with primary key '%s' has an invalid " "foreign key: %s.%s contains a value '%s' that does not have a corresponding value in %s.%s." % (table_name, bad_row[0], table_name, column_name, bad_row[1], referenced_table_name, referenced_column_name))
def insertMonData(mondata):
    """Decode a JSON monitoring sample and insert it into a sharded table.

    The shard table is chosen by FNV-hashing the host name. Duplicate
    samples (IntegrityError) are silently ignored; JSON/key errors
    propagate as before.
    """
    try:
        data = json.loads(mondata)
        timeOfData = int(data['Time'])
        hostIndex = monTables[fnvhash(data['Host']) % len(monTables)]
        # Table names cannot be bound as parameters, but all values are now
        # passed via placeholders instead of %-interpolating untrusted JSON
        # into the SQL string (injection risk in the original).
        sql = ("insert into `%s` (`host`,`mem_free`,`mem_usage`,`mem_total`,"
               "`load_avg`,`time`) VALUES(%%s, %%s, %%s, %%s, %%s, %%s)"
               % hostIndex)
        cur.execute(sql, (data['Host'], data['MemFree'], data['MemUsage'],
                          data['MemTotal'], data['LoadAvg'], timeOfData))
    except mysql.IntegrityError:
        # Duplicate sample for this host/time: ignore.
        pass
def delete(cls, id):
    """Delete the movie_order row with the given id.

    Rolls back and returns False when a constraint blocks the delete;
    otherwise clears the memcache entry and returns True.
    """
    try:
        store.execute('delete from movie_order where id=%s', id)
        store.commit()
    except IntegrityError:
        # Another row still references this order: undo and report failure.
        store.rollback()
        return False
    cls.clear_mc(id)
    return True
def delete(cls, id):
    """Delete the movie_order row with the given id.

    Returns False (after rollback) when a constraint blocks the delete,
    True otherwise.
    """
    try:
        store.execute('delete from movie_order where id=%s', id)
        store.commit()
    except IntegrityError:
        # Constraint violation: undo the partial transaction.
        store.rollback()
        return False
    return True
def checkValid(item):
    """Re-check a proxy: refresh its DB record if alive, delete it otherwise.

    The proxy's response time (in whole seconds) is stored as its speed.
    """
    starttime = datetime.datetime.now()
    rst = checkProxy(proxyIP=item.ip_port, protocol="http", timeout=5)
    costtimie = (datetime.datetime.now() - starttime).seconds
    if rst is not None and rst["status"] == "ok":
        proxy = freshProxy(ip_port=item.ip_port,
                           type=item.type,
                           location=rst["rstLocation"].encode("utf-8"),
                           speed=costtimie,
                           source=item.source,
                           rule_id=item.rule_id,
                           lastcheck=datetime.datetime.now())
        # print statements and "except E, e" were Python-2-only syntax.
        print(rst["rstIP"])
        print(rst["rstLocation"].encode("utf-8"))
        session = loadSession()
        try:
            session.merge(proxy)
            session.commit()
        except MySQLdb.IntegrityError as e:
            # e.message is Python-2-only; printing the exception is portable.
            print(e)
    else:
        # Proxy is dead or the check failed: drop it.
        deleteProxy(item)
def get_buckets_acls(self, resource_name, timestamp):
    """Select the bucket acls from a bucket acls snapshot table.

    Args:
        resource_name (str): String of the resource name.
        timestamp (str): String of timestamp, formatted as YYYYMMDDTHHMMSSZ.

    Returns:
        dict: Bucket ACLs keyed by an incrementing counter.

    Raises:
        MySQLError: An error with MySQL has occurred (logged, not raised).
    """
    bucket_acls = {}
    try:
        acls_sql = select_data.BUCKET_ACLS.format(timestamp)
        rows = self.execute_sql_with_fetch(resource_name, acls_sql, None)
        for idx, row in enumerate(rows):
            bucket_acls[idx] = bkt_acls.BucketAccessControls(
                bucket=row['bucket'],
                entity=row['entity'],
                email=row['email'],
                domain=row['domain'],
                role=row['role'],
                project_number=row['project_number'])
    except (DataError, IntegrityError, InternalError, NotSupportedError,
            OperationalError, ProgrammingError) as e:
        # Log and fall through: callers receive whatever was collected.
        LOGGER.error(errors.MySQLError(resource_name, e))
    return bucket_acls
def get_bigquery_acls(self, resource_name, timestamp):
    """Select the Big Query acls from a Big Query acls snapshot table.

    Args:
        resource_name (str): String of the resource name.
        timestamp (str): String of timestamp, formatted as YYYYMMDDTHHMMSSZ.

    Returns:
        dict: Dictionary keyed by the count of ACLs and then the ACLs.

    Raises:
        MySQLError: An error with MySQL has occurred (logged, not raised).
    """
    bigquery_acls = {}
    cnt = 0
    try:
        bigquery_acls_sql = select_data.BIGQUERY_ACLS.format(timestamp)
        rows = self.execute_sql_with_fetch(resource_name,
                                           bigquery_acls_sql,
                                           None)
        # The loop must live inside the try: in the original, a fetch error
        # left `rows` unbound and the loop raised NameError after logging.
        # This also matches the sibling get_*_acls methods.
        for row in rows:
            bigquery_acl = bq_acls.BigqueryAccessControls(
                dataset_id=row['dataset_id'],
                special_group=row['access_special_group'],
                user_email=row['access_user_by_email'],
                domain=row['access_domain'],
                role=row['role'],
                group_email=row['access_group_by_email'],
                project_id=row['project_id'])
            bigquery_acls[cnt] = bigquery_acl
            cnt += 1
    except (DataError, IntegrityError, InternalError, NotSupportedError,
            OperationalError, ProgrammingError) as e:
        LOGGER.error(errors.MySQLError(resource_name, e))
    return bigquery_acls
def _get_cloudsql_instance_acl_map(self, resource_name, timestamp):
    """Create CloudSQL instance acl map.

    Args:
        resource_name (str): String of the resource name.
        timestamp (str): String of timestamp, formatted as YYYYMMDDTHHMMSSZ.

    Returns:
        dict: Map of instance acls, keyed by hash of
            "<project_number>,<instance_name>", valued with the list of
            authorized networks.

    Raises:
        MySQLError: An error with MySQL has occurred (logged, not raised).
    """
    acl_map = {}
    try:
        acls_sql = select_data.CLOUDSQL_ACLS.format(timestamp)
        rows = self.execute_sql_with_fetch(resource_name, acls_sql, None)
        for row in rows:
            network = row['value']
            # One key per (project, instance) pair.
            key = hash(str(row['project_number']) + ',' + row['instance_name'])
            if key in acl_map:
                # Append only networks not already recorded.
                if network not in acl_map[key]:
                    acl_map[key].append(network)
            else:
                acl_map[key] = [network]
    except (DataError, IntegrityError, InternalError, NotSupportedError,
            OperationalError, ProgrammingError) as e:
        LOGGER.error(errors.MySQLError(resource_name, e))
    return acl_map
def load_data(self, resource_name, timestamp, data):
    """Load data into a snapshot table.

    Args:
        resource_name (str): String of the resource name.
        timestamp (str): String of timestamp, formatted as YYYYMMDDTHHMMSSZ.
        data (iterable): An iterable or a list of data to be uploaded.

    Raises:
        MySQLError: When an error has occured while executing the query.
    """
    # Stage the rows to a CSV file, then bulk-load it server-side.
    with csv_writer.write_csv(resource_name, data) as csv_file:
        try:
            table_name = self._create_snapshot_table_name(
                resource_name, timestamp)
            sql = load_data_sql_provider.provide_load_data_sql(
                resource_name, csv_file.name, table_name)
            LOGGER.debug('SQL: %s', sql)
            cursor = self.conn.cursor()
            cursor.execute(sql)
            self.conn.commit()
            # TODO: Return the snapshot table name so that it can be tracked
            # in the main snapshot table.
        except (DataError, IntegrityError, InternalError, NotSupportedError,
                OperationalError, ProgrammingError) as e:
            raise MySQLError(resource_name, e)
def get_latest_snapshot_timestamp(self, statuses):
    """Select the latest timestamp of the completed snapshot.

    Args:
        statuses (tuple): The tuple of snapshot statuses to filter on.

    Returns:
        str: The string timestamp of the latest complete snapshot.

    Raises:
        MySQLError: When no rows are found or the query fails.
    """
    # Guard against a non-tuple argument; default to successful snapshots.
    if not isinstance(statuses, tuple):
        statuses = ('SUCCESS',)
    # One %s placeholder per status value.
    placeholders = ','.join(['%s'] * len(statuses))
    filter_clause = SNAPSHOT_STATUS_FILTER_CLAUSE.format(placeholders)
    try:
        cursor = self.conn.cursor()
        cursor.execute(
            select_data.LATEST_SNAPSHOT_TIMESTAMP + filter_clause,
            statuses)
        row = cursor.fetchone()
        if not row:
            # Wrapped into MySQLError by the handler below.
            raise NoResultsError('No snapshot cycle found.')
        return row[0]
    except (DataError, IntegrityError, InternalError, NotSupportedError,
            OperationalError, ProgrammingError, NoResultsError) as e:
        raise MySQLError('snapshot_cycles', e)
def get_cloudsql_acls(self, resource_name, timestamp):
    """Select the cloudsql acls for project from a snapshot table.

    Args:
        resource_name (str): String of the resource name.
        timestamp (str): String of timestamp, formatted as YYYYMMDDTHHMMSSZ.

    Returns:
        dict: CloudSQL ACLs keyed by an incrementing counter.

    Raises:
        MySQLError: An error with MySQL has occurred (logged, not raised).
    """
    cloudsql_acls = {}
    try:
        instances_sql = select_data.CLOUDSQL_INSTANCES.format(timestamp)
        rows = self.execute_sql_with_fetch(resource_name,
                                           instances_sql, None)
        # Per-instance authorized networks, keyed by (project, instance).
        acl_map = self._get_cloudsql_instance_acl_map(resource_name,
                                                      timestamp)
        for idx, row in enumerate(rows):
            project_number = row['project_number']
            instance_name = row['name']
            networks = self._get_networks_for_instance(acl_map,
                                                       project_number,
                                                       instance_name)
            cloudsql_acls[idx] = csql_acls.CloudSqlAccessControl(
                instance_name=instance_name,
                authorized_networks=networks,
                ssl_enabled=row['settings_ip_configuration_require_ssl'],
                project_number=project_number)
    except (DataError, IntegrityError, InternalError, NotSupportedError,
            OperationalError, ProgrammingError) as e:
        LOGGER.error(errors.MySQLError(resource_name, e))
    return cloudsql_acls