The following 50 code examples, extracted from open-source Python projects, illustrate how to use MySQLdb.Error().
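All of the examples below share the same basic pattern: run a statement inside a try block and catch MySQLdb.Error, whose args tuple carries the MySQL error code and message. The minimal sketch below shows that pattern in isolation; the connection parameters, table, and function name are placeholders for illustration only and are not taken from any of the quoted projects.

import MySQLdb

def fetch_titles():
    conn = None
    try:
        # placeholder connection parameters
        conn = MySQLdb.connect(host='localhost', user='user', passwd='secret',
                               db='testdb', charset='utf8')
        cursor = conn.cursor()
        cursor.execute('SELECT id, title FROM articles LIMIT 10')
        return cursor.fetchall()
    except MySQLdb.Error as e:
        # e.args[0] is the MySQL error code, e.args[1] the message
        print('MySQL error %d: %s' % (e.args[0], e.args[1]))
        return []
    finally:
        if conn is not None:
            conn.close()
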
def insert_user(self, uid, name, pic):
    try:
        userid = 0
        bSginal = self.curosr.execute("SELECT * FROM yzy_users WHERE uid='%s'" % (uid))
        if bSginal == 1:
            results = self.curosr.fetchone()
            userid = results[0]
        else:
            sql = """INSERT INTO yzy_users(uid,uname,avatar) VALUES('%s','%s','%s')""" % (uid, name, pic)
            vsql = sql.encode('utf8')
            if self.curosr.execute(vsql) == 1:
                userid = self.curosr.lastrowid
    except MySQLdb.Error, e:
        print "Error:%d:%s" % (e.args[0], e.args[1])
    return userid

def populate_store(self, store):
    try:
        connection = None
        connection = MySQLdb.connect('localhost', 'annon', 'pass')
        cursor = connection.cursor()
        cursor.execute("Select * From `INFORMATION_SCHEMA`.`SCHEMATA`")
        rows = cursor.fetchall()
        for row in rows:
            store.append([row[0], row[1], row[2], row[3]])
    except MySQLdb.Error, e:
        store.append([str(e.args[0]), e.args[1], '', ''])
    finally:
        if connection != None:
            connection.close()

def insertwl():
    counter = 0
    try:
        words = open(wordlist, "r")
    except(IOError):
        print "Error: check", wordlist
        sys.exit(1)
    dupes = 0
    print "Inserting Wordlist, Skipping Dupes....may take ages"
    print "\nStart :", timer()
    for word in words.read().split('\n'):
        hash = md5.new(word).hexdigest()
        counter = counter + 1
        try:
            csr = dbconnect()
            csr.execute("INSERT INTO "+dbname+".data (plain, md5)VALUES ('"+str(word)+"', '"+str(hash)+"');")
        except MySQLdb.Error, e:
            dupes = dupes + 1
    print "\nDupes :", dupes
    print "\nDone :", timer()

def insert_pagelength(self, source_article_id, screen_positions_1920_1080, zip_file_path):
    data = {}
    data['source_article_id'] = source_article_id
    if screen_positions_1920_1080 is not None:
        data['page_length_1920_1080'] = screen_positions_1920_1080
    else:
        data['page_length_1920_1080'] = None
    #print data
    sql = "INSERT INTO page_length (id, page_length_1920_1080) VALUES" \
          "(%(source_article_id)s, %(page_length_1920_1080)s);"
    try:
        self.cursor.execute(sql, data)
    except (MySQLdb.Error, MySQLdb.Warning), e:
        print ('FAIL: Data caused warning or error "%s" for source_article_id: "%s"', data, source_article_id)
        print 'FAIL: EXCEPTION:', e
        print zip_file_path

def resolve_redirect(self, name):
    """resolves a redirect and returns the real article name

    @param name the name of the redirect
    @return the real name of the article or None if it cannot be resolved
    """
    try:
        self._cursor.execute('SELECT target_article_name FROM redirects WHERE source_article_name=%s;', (name,))
        row = self._cursor.fetchone()
        if row != None:
            return row[0]
    except MySQLdb.Error, e:
        logging.error('error resolving redirect for name "%s": %s (%d)' % (name.encode('ascii', 'ignore'), e.args[1], e.args[0]))
    return None

def retrieve_all_articles(self):
    """retrieves all articles. useful for crawling or making media wiki api requests

    @return a list of dictionaries holding the following keys:
        'id': the id of the retrieved article
        'rev_id': the revision id of the retrieved article
        'title': the title of the retrieved article
    """
    articles = []
    try:
        #self._cursor.execute('SELECT * FROM articles WHERE RAND()<=0.0006 limit 1000;')
        #self._cursor.execute('SELECT * FROM articles limit 1000;')
        self._cursor.execute('SELECT * FROM articles;')
        result = self._cursor.fetchall()
        for row in result:
            article = {}
            article['id'] = row[0]
            article['rev_id'] = row[1]
            article['title'] = row[2]
            articles.append(article)
    except MySQLdb.Error, e:
        logging.error('error retrieving 1000 random articles %s (%d)' % (e.args[1], e.args[0]))
    return articles

def retrieve_all_unique_links(self):
    """retrieves all links. These are the network edges

    @return a list of dictionaries holding the following keys:
        'from': the source article id
        'to': the target article id
    """
    links = []
    try:
        self._cursor.execute('SELECT * FROM unique_links;')
        result = self._cursor.fetchall()
        for row in result:
            link = {}
            link['from'] = row[0]
            link['to'] = row[1]
            links.append(link)
    except MySQLdb.Error, e:
        logging.error('error retrieving unique links %s (%d)' % (e.args[1], e.args[0]))
    return links

def retrieve_all_transitions(self):
    """retrieves all transitions from the wikipedia clickstream_derived table that are internal links. These are the network edges

    @return a list of dictionaries holding the following keys:
        'from': the source article id
        'to': the target article id
    """
    links = []
    try:
        self._cursor.execute('SELECT * FROM clickstream_derived WHERE link_type_derived LIKE %s AND NOT link_type_derived=%s;', ("internal%", "internal-nonexistent",))
        result = self._cursor.fetchall()
        for row in result:
            link = {}
            link['from'] = row[0]
            link['to'] = row[1]
            links.append(link)
    except MySQLdb.Error, e:
        logging.error('error retrieving unique links %s (%d)' % (e.args[1], e.args[0]))
    return links

def retrieve_all_internal_transitions(self):
    """retrieves all internal-link transitions from the wikipedia clickstream_derived table. These are the network edges

    @return a list of dictionaries holding the following keys:
        'from': the source article id
        'to': the target article id
    """
    links = []
    try:
        self._cursor.execute('SELECT * FROM clickstream_derived WHERE link_type_derived=%s;', ("internal-link",))
        result = self._cursor.fetchall()
        for row in result:
            link = {}
            link['from'] = row[0]
            link['to'] = row[1]
            links.append(link)
    except MySQLdb.Error, e:
        logging.error('error retrieving unique links %s (%d)' % (e.args[1], e.args[0]))
    return links

def retrieve_all_internal_transitions_counts(self):
    """retrieves all internal-link transitions from the wikipedia clickstream_derived table. These are the network edges

    @return a list of dictionaries holding the following keys:
        'from': the source article id
        'to': the target article id
        'counts': the number of observed transitions
    """
    links = []
    try:
        self._cursor.execute('SELECT * FROM clickstream_derived WHERE link_type_derived=%s;', ("internal-link",))
        result = self._cursor.fetchall()
        for row in result:
            link = {}
            link['from'] = row[0]
            link['to'] = row[1]
            link['counts'] = row[2]
            links.append(link)
    except MySQLdb.Error, e:
        logging.error('error retrieving unique links %s (%d)' % (e.args[1], e.args[0]))
    return links

def retrieve_all_links_coords(self):
    """retrieves the x/y coordinates for all links in wikipedia.

    @return a list of coords holding the following keys:
        'source_article_id': the wikipedia article id
        'x': x position on screen
        'y': y position on screen
    """
    coords = []
    try:
        self._cursor.execute('SELECT source_article_id, target_x_coord_1920_1080, target_y_coord_1920_1080 FROM links where target_x_coord_1920_1080 is not Null and target_y_coord_1920_1080 is not Null and target_x_coord_1920_1080!=0 and target_y_coord_1920_1080!=0 and source_article_id!=target_article_id;')
        result = self._cursor.fetchall()
        for row in result:
            link = {}
            link['source_article_id'] = row[0]
            link['x'] = row[1]
            link['y'] = row[2]
            coords.append(link)
    except MySQLdb.Error, e:
        logging.error('error retrieving xy coords for all links %s (%d)' % (e.args[1], e.args[0]))
    return coords

def retrieve_all_links_coords_clicks(self):
    """retrieves the x/y coordinates and click counts for all links in wikipedia.

    @return a list of coords holding the following keys:
        'key': (source article id, target article id)
        'x': x position on screen
        'y': y position on screen
        'counts': the number of observed clicks
        'page_length': the page length at 1920x1080
    """
    coords = []
    try:
        self._cursor.execute('select l.source_article_id, l.target_article_id, l.target_x_coord_1920_1080, l.target_y_coord_1920_1080, c.counts, p.page_length_1920_1080 from links l, clickstream_derived c, page_length p where l.source_article_id=c.prev_id and l.target_article_id=c.curr_id and c.link_type_derived like %s and l.source_article_id = p.id and l.target_x_coord_1920_1080 is not Null and l.target_y_coord_1920_1080 is not Null and l.target_x_coord_1920_1080!=0 and l.target_y_coord_1920_1080!=0 and l.source_article_id!=l.target_article_id;', ("internal%",))
        result = self._cursor.fetchall()
        for row in result:
            link = {}
            link['key'] = row[0], row[1]
            link['x'] = row[2]
            link['y'] = row[3]
            link['counts'] = row[4]
            link['page_length'] = row[5]
            coords.append(link)
    except MySQLdb.Error, e:
        logging.error('error retrieving xy coords for all links %s (%d)' % (e.args[1], e.args[0]))
    return coords

def retrieve_all_links_multpile_occ(self):
    """retrieves the x/y coordinates for all links in wikipedia.

    @return a list of coords holding the following keys:
        'key': (source article id, target article id)
        'x': x position on screen
        'y': y position on screen
    """
    coords = []
    try:
        self._cursor.execute('SELECT source_article_id, target_article_id, target_x_coord_1920_1080, target_y_coord_1920_1080 FROM links where target_x_coord_1920_1080 is not Null and target_y_coord_1920_1080 is not Null and target_x_coord_1920_1080!=0 and target_y_coord_1920_1080!=0 and source_article_id!=target_article_id;')
        result = self._cursor.fetchall()
        for row in result:
            link = {}
            link['key'] = row[0], row[1]
            link['x'] = row[2]
            link['y'] = row[3]
            coords.append(link)
    except MySQLdb.Error, e:
        logging.error('error retrieving xy coords for all links %s (%d)' % (e.args[1], e.args[0]))
    return coords

def pickle_category_counts_distribution():
    results = {}
    db = MySQLDatabase(DATABASE_HOST, DATABASE_USER, DATABASE_PASSWORD, DATABASE_NAME)
    db_worker_view = db.get_work_view()
    cursor = db_worker_view._cursor
    for category in ['lead', 'infobox', 'body', 'left-body', 'navbox']:
        try:
            cursor.execute('select counts from link_features where counts is not null and visual_region=%s;', (category,))
            result = cursor.fetchall()
            results[category] = result
        except MySQLdb.Error, e:
            print e
    try:
        cursor.execute('select counts from clickstream_derived_internal_links;')
        result = cursor.fetchall()
        results['counts'] = result
    except MySQLdb.Error, e:
        print e
    write_pickle(HOME + 'output/category_counts_distribution.obj', results)

def pickle_aggregated_counts_distribution():
    db = MySQLDatabase(DATABASE_HOST, DATABASE_USER, DATABASE_PASSWORD, DATABASE_NAME)
    db_worker_view = db.get_work_view()
    cursor = db_worker_view._cursor
    results = {}
    try:
        cursor.execute('select sum(counts) from clickstream_derived_internal_links group by prev_id;')
        result = cursor.fetchall()
        results['source_article'] = result
    except MySQLdb.Error, e:
        print e
    try:
        cursor.execute('select sum(counts) from clickstream_derived_internal_links group by curr_id;')
        result = cursor.fetchall()
        results['target_article'] = result
    except MySQLdb.Error, e:
        print e
    write_pickle(HOME + 'output/aggregated_counts_distribution.obj', results)

def select(self, query, data, key):
    q = query
    if data:
        values = self._list_to_string(data)
        q = q.format(values)
    c = self.db.cursor()
    try:
        c.execute(q)
        output = self._format_results(c)
        if key is not None:
            client = Client(base_url='http://localhost')
            client.keys.update(KeyValuePair(name=key, value=str(output)))
            return key
        else:
            return output
    except MySQLdb.Error, e:  # pylint: disable=no-member
        raise Exception(e)

def CheckConn(self, port):
    retry_num = 0
    while True:
        try:
            local_conn = MySQLdb.connect(host='127.0.0.1', user=mysql_user, passwd=mysql_password, port=int(port), db='', charset="utf8")
            local_conn.cursor()
            local_conn.close()
            state = True
            break
        except MySQLdb.Error, e:
            logging.error(e)
            state = None
            retry_num += 1
            time.sleep(1)
            if retry_num >= 3:
                break
    return state

def ChangeMaster(self, host, port):
    '''Point replication at a new master host/port.'''
    repluser, replpassword, ssl_ca, ssl_cert, ssl_key = GetConf().GetReplAcount()
    try:
        sql = 'reset slave all;'
        print self.host
        try:
            self.mysql_cur.execute(sql)
        except:
            self.mysql_cur.execute('stop slave')
            self.mysql_cur.execute(sql)
        change_sql = 'change master to master_host="%s",master_port=%s,master_user="%s",master_password="%s",master_auto_position=1 for channel "default"' % (host, int(port), repluser, replpassword)
        self.mysql_cur.execute(change_sql)
        return True
    except MySQLdb.Warning, e:
        start_sql = 'start slave'
        self.mysql_cur.execute(start_sql)
        self.mysql_cur.execute('set global read_only=1;')
        logging.warning('Change master to %s state : Warning' % host)
        logging.warning(traceback.format_exc())
        return True
    except MySQLdb.Error, e:
        logging.error('Change master to %s state : Error' % host)
        logging.error(traceback.format_exc())
        return False

def db_connect(self):
    """Connect to the database."""
    if self.db_lock.acquire():
        try:
            self.conn = MySQLdb.Connection(
                host=self.host,
                port=self.port,
                user=self.user,
                passwd=self.passwd,
                charset=self.charset,
                use_unicode=False)
        except MySQLdb.Error, e:
            log_db.error('connect error:' + str(e))
        self.cursor = self.conn.cursor()
        if not self.cursor:
            raise (NameError, "Connect failure")
        log_db.warning("database connection established")
        self.db_lock.release()

def execute_sql_value(self, sql, value):
    """
    Execute a parameterized SQL statement.
    :param sql: SQL statement
    :param value: parameter values
    """
    try:
        self.cursor.execute(sql, value)
    except MySQLdb.Error, e:
        if e.args[0] == 2013 or e.args[0] == 2006:  # connection lost, reconnect
            self.db_connect()
            log_db.error('connection lost, reconnected and retrying')
            self.cursor.execute(sql, value)
        else:
            log_db.error('execute_no_return error:' + str(e))
            log_db.error('SQL : ' + sql)

def execute_no_return(self, sql):
    """
    Execute a SQL statement without returning a result set.
    :param sql: SQL statement
    """
    log_db.info('executing:' + str(sql[:127]))
    if self.db_lock.acquire():
        try:
            self.cursor.execute(sql)
        except MySQLdb.Error, e:
            if e.args[0] == 2013 or e.args[0] == 2006:  # connection lost, reconnect
                self.db_lock.release()
                self.db_connect()
                log_db.error('connection lost, reconnected and retrying')
                self.cursor.execute(sql)
                self.db_lock.acquire()
            else:
                log_db.error('execute_no_return error:' + str(e))
                log_db.error('SQL : ' + sql)
                self.db_lock.release()
                return 'execute fail'
        self.db_lock.release()
        return 'execute success'

def execute_Iterator(self, sql, pretchNum=1000):
    """
    Execute a SQL statement and yield results in batches (generator).
    :param sql: SQL statement
    :param pretchNum: number of rows per batch
    :return: result rows
    """
    log_db.info('executing:' + sql)
    Iterator_count = 0
    result = None
    result_list = []
    try:
        Resultnum = self.cursor.execute(sql)
        for i in range(Resultnum):
            result = self.cursor.fetchone()
            result_list.append(result)
            Iterator_count += 1
            if Iterator_count == pretchNum:
                yield result_list
                result_list = []
                Iterator_count = 0
        yield result_list  # yield the remaining partial batch
    except MySQLdb.Error, e:
        log_db.error('execute_Iterator error:' + str(e))
        log_db.error('SQL : ' + sql)

def insert_data(self, table, my_dict):
    try:
        cols = ','.join(my_dict.keys())
        values = '","'.join(my_dict.values())
        values = '"' + values + '"'
        try:
            # print "table:%s,cols:%s,values:%s." % (table, cols, values)
            sql = "insert into %s (%s) values(%s)" % (table, cols, values)
            # print "sql:", sql
            result = self.cur.execute(sql)
            self.db.commit()
            if result:
                return 1
            else:
                return 0
        except MySQLdb.Error as e:
            self.db.rollback()
            if "key 'PRIMARY'" in e.args[1]:
                print Fore.RED + self.get_current_time(), "duplicate primary key, insert skipped"
            else:
                print Fore.RED + self.get_current_time(), "insert failed %d: %s" % (e.args[0], e.args[1])
    except MySQLdb.Error as e:
        print Fore.RED + self.get_current_time(), "database error %d: %s" % (e.args[0], e.args[1])

def insert_feature_data(self, database_name, data, id):
    if isinstance(data, np.ndarray):
        pass
    if isinstance(data, pd.DataFrame):
        data = data.values
    if data.shape[0] == 0:
        return None
    sql_line = '''insert into %s values('{id}', '{date}', {data} )''' % (database_name, )
    for d in data:
        try:
            data_str = ','.join([str(x) for x in d[1:]])
            line = sql_line.format(id=id, date=d[0], data=data_str)
            self.cur.execute(line)
            # self.cur.close()
            # self.cur = self.db.cursor()
        except MySQLdb.Error as e:
            print 'insert error:', e
    self.db.commit()

def __exit__(self, exc_type, exc_val, exc_tb):
    """
    Context manager protocol. If a db exception is fired and self.retry_attempt
    is not zero, it is only logged and does not propagate, otherwise it
    propagates up. Any open transaction is rolled back. In case of no
    exception, the transaction gets committed.
    """
    if not exc_type:
        self.con.commit()
        self.retry_attempt = 0
    else:
        try:
            if self.con:
                self.con.rollback()
        except my.Error:
            pass
        try:
            self.close()
        except my.Error:
            pass
        if self.retry_attempt:
            self.log.info("Database error (%d attempts left): %s %s" %
                          (self.retry_attempt, exc_type.__name__, exc_val))
            return True

def insert_season(globalmovieid, imdb, seasonlink, season):
    db = getCursor()
    cur = db.cursor()
    sql = "insert into Season(movieid,season,link) values(%s,%s,%s)"
    try:
        cur.execute(sql, [globalmovieid, season, seasonlink])
        db.commit()
    except MySQLdb.Error, e:
        print e
    page = requests.get(seasonlink, headers=headers)
    tree = html.fromstring(page.content)
    tree.make_links_absolute(seasonlink)
    images = tree.xpath('//div[@class="list detail eplist"]//div[@class="image"]//img/@src')
    titles = tree.xpath('//div[@class="list detail eplist"]//div[@class="info"]//strong/a/@title')
    plot = tree.xpath('//div[@class="list detail eplist"]//div[@class="info"]//div[@class="item_description"]/text()')
    print len(images), len(titles), len(plot)
    for i in xrange(len(titles)):
        insert_episode(globalmovieid, imdb, season, titles[i].strip(), images[i].strip(), plot[i].strip(), i + 1)

def insert_series_into_movie(imdb, globalmovieid, title, genre, content_rating, ratings, rating_value, plot, poster):
    db = getCursor()
    cur = db.cursor()
    if not rating_value:
        rating_value = 5
    if not content_rating:
        content_rating = "R"
    title = remove_all_special_chars(title)
    plot = remove_all_special_chars(plot)
    sql = "insert into Movie(movieid,imdbid,title,plot,altplot,genre,ratings,ratingvalue,contentrating,poster) " \
          "values('%s','%s','%s','%s','%s','%s','%s','%s','%s','%s') " % \
          (globalmovieid, imdb, title.strip(), plot.strip(), " ", genre, int(ratings), float(rating_value), content_rating, poster)
    try:
        cur.execute(sql)
        db.commit()
    except MySQLdb.Error, e:
        db.rollback()
        print e
    db.close()

def execute(self, sql):
    result = None
    if self.db_lock.acquire():
        try:
            self.cursor.execute(sql)
            result = self.cursor.fetchall()
        except MySQLdb.Error, e:
            if e.args[0] == 2013 or e.args[0] == 2006:  # connection lost, reconnect
                self.db_lock.release()
                self.get_connect()
                print str(datetime.datetime.now()).split(".")[0], 'reconnected, retrying query'
                result = self.execute(sql)  # retry
                self.db_lock.acquire()
            else:
                print str(datetime.datetime.now()).split(".")[0], "ERROR %d: %s" % (e.args[0], e.args[1])
        self.db_lock.release()
    return result if result else None

def execute(self, sql):
    result = None
    if self.db_lock.acquire():
        try:
            self.cursor.execute(sql)
            result = self.cursor.fetchall()
        except MySQLdb.Error, e:
            if e.args[0] == 2013 or e.args[0] == 2006:  # connection lost, reconnect
                self.db_lock.release()
                self.get_connect()
                print str(datetime.datetime.now()).split(".")[0], 'reconnected, retrying query'
                result = self.execute(sql)  # retry
                self.db_lock.acquire()
            else:
                print str(datetime.datetime.now()).split(".")[0], "ERROR %d: %s" % (e.args[0], e.args[1])
        self.db_lock.release()
    return result if result else None

def __init__(self, dbconfig=DBCONFIG):
    """Open a MySQL connection, retrying on failure until the timeout is reached."""
    try:
        self._conn = MySQLdb.connect(host=dbconfig['host'],
                                     port=dbconfig['port'],
                                     user=dbconfig['user'],
                                     passwd=dbconfig['passwd'],
                                     db=dbconfig['db'],
                                     charset=dbconfig['charset'])
    except MySQLdb.Error, e:
        self.error_code = e.args[0]
        error_msg = 'MySQL error! ', e.args[0], e.args[1]
        print error_msg
        # wait a few seconds and retry until the timeout is exceeded
        if self._timecount < self._TIMEOUT:
            interval = 5
            self._timecount += interval
            time.sleep(interval)
            return self.__init__(dbconfig)
        else:
            raise Exception(error_msg)
    self._cur = self._conn.cursor()
    self._instance = MySQLdb

def execute(self, sql):
    result = None
    if self.db_lock.acquire():
        try:
            self.cursor.execute(sql)
            result = self.cursor.fetchall()
        except MySQLdb.Error, e:
            if e.args[0] == 2013 or e.args[0] == 2006:  # connection lost, reconnect
                self.db_lock.release()
                self.get_connect()
                print str(datetime.datetime.now()).split(".")[0], 'reconnected, retrying query'
                result = self.execute(sql)  # retry
                self.db_lock.acquire()
            else:
                print str(datetime.datetime.now()).split(".")[0], "ERROR %d: %s" % (e.args[0], e.args[1])
        self.db_lock.release()
    return result if result else None

def execute(self, sql):
    """
    Execute a SQL statement.
    :param sql: SQL statement
    :return: all rows of the result set
    """
    log_db.info('executing:' + str(sql[:127]))
    result = None
    if self.db_lock.acquire():
        try:
            self.cursor.execute(sql)
            result = self.cursor.fetchall()
            self.db_lock.release()
        except MySQLdb.Error, e:
            if e.args[0] == 2013 or e.args[0] == 2006:  # connection lost, reconnect
                self.db_lock.release()
                self.db_connect()
                log_db.error('connection lost, reconnected and retrying')
                result = self.execute(sql)  # retry
                self.db_lock.acquire()
            else:
                self.db_lock.release()
                log_db.error('execute error:' + str(e))
                log_db.error('SQL : ' + sql)
    return result if result else None

def execute(self, sql):
    result = None
    if self.db_lock.acquire():
        try:
            self.cursor.execute(sql)
            result = self.cursor.fetchall()
        except MySQLdb.Error, e:
            if e.args[0] == 2013 or e.args[0] == 2006:  # connection lost, reconnect
                self.db_lock.release()
                self.get_connect()
                print str(datetime.datetime.now()).split(".")[0], 'reconnected, retrying query'
                result = self.execute(sql)  # retry
                self.db_lock.acquire()
            else:
                print str(datetime.datetime.now()).split(".")[0], "ERROR %d: %s" % (e.args[0], e.args[1])
        self.db_lock.release()
    return result if result else None

def execute(self, sql):
    """Execute a SQL statement and return all rows."""
    result = None
    if self.db_lock.acquire():
        try:
            self.cursor.execute(sql)
            result = self.cursor.fetchall()
        except MySQLdb.Error, e:
            if e.args[0] == 2013 or e.args[0] == 2006:  # connection lost, reconnect
                self.db_lock.release()
                self.get_connect()
                print str(datetime.datetime.now()).split(".")[0], 'reconnected, retrying query'
                result = self.execute(sql)  # retry
                self.db_lock.acquire()
            else:
                print str(datetime.datetime.now()).split(".")[0], "ERROR %d: %s" % (e.args[0], e.args[1])
        self.db_lock.release()
    return result if result else None

def get_domains(self):
    global domains
    domains = []
    results = []
    try:
        db = MySQLdb.connect(host='172.26.253.3', user='root', passwd='platform', charset="utf8", db='malicious_domain_sys')
        cur = db.cursor()
        sql = 'SELECT locate.id, domain_index.domain FROM locate,domain_index WHERE locate.id = domain_index.id AND locate.flag > 0 AND locate.flag mod 10 = 0'
        count = cur.execute(sql)
        # results = cur.fetchall()
        domains = cur.fetchall()
        # for result in results:
        #     if result[0].split('.')[-1] == 'cn':
        #         domains.append(result[0])
        db.close()
    except MySQLdb.Error, e:
        raise e

def execute_no_return(self, sql):
    """
    Execute a SQL statement without returning a result set.
    :param sql: SQL statement
    """
    if self.db_lock.acquire():
        try:
            self.cursor.execute(sql)
        except MySQLdb.Error as e:
            if e.args[0] == 2013 or e.args[0] == 2006:  # connection lost, reconnect
                self.db_lock.release()
                self.db_connect()
                print ('connection lost, reconnected and retrying')
                self.cursor.execute(sql)
                self.db_lock.acquire()
            else:
                print ('execute_no_return error:' + str(e))
                print ('SQL : ' + sql)
                return 'execute fail'
        except:
            print 'unexpected error occurred!'
            print sql
        self.db_lock.release()
        return 'execute success'

def execute(self, sql):
    """
    Execute a SQL statement.
    :param sql: SQL statement
    :return: all rows of the result set
    """
    result = None
    if self.db_lock.acquire():
        try:
            self.cursor.execute(sql)
            result = self.cursor.fetchall()
        except MySQLdb.Error, e:
            if e.args[0] == 2013 or e.args[0] == 2006:  # connection lost, reconnect
                self.db_lock.release()
                self.db_connect()
                print ('connection lost, reconnected and retrying')
                result = self.execute(sql)  # retry
                self.db_lock.acquire()
            else:
                print ('execute error:' + str(e))
                print ('SQL : ' + sql)
        self.db_lock.release()
    return result if result else None

def main(user, password, db, tbl, slave, vertical, debug, version, color):
    """twindb_table_compare reads percona.checksums from the master and slave
    and shows records that differ if there are any inconsistencies."""
    if version:
        print(__version__)
        exit(0)
    setup_logging(LOG, debug=debug, color=color)
    try:
        for database, table in get_inconsistent_tables(slave, user, password, ch_db=db, ch_tbl=tbl):
            get_inconsistencies(database, table, slave, user, password,
                                ch_db=db, ch_tbl=tbl,
                                vertical=vertical, color=color)
    except MySQLdb.Error as err:  # pylint: disable=no-member
        LOG.error(err)
        exit(1)

def mysql_command(self, conn, sql_cmd):
    # execute a SQL command and collect all column values
    try:
        ret = []
        conn = MySQLdb.connect(host=conn["host"], user=conn["user"], passwd=conn["password"], db=conn["database"], port=conn["port"], charset="utf8")
        cursor = conn.cursor()
        n = cursor.execute(sql_cmd)
        for row in cursor.fetchall():
            for i in row:
                ret.append(i)
        conn.commit()
        cursor.close()
        conn.close()
    except MySQLdb.Error, e:
        ret.append(e)
    return ret

def select_table(self, conn, sql_cmd, parmas):
    # execute a parameterized SQL query and collect all column values
    try:
        ret = []
        conn = MySQLdb.connect(host=conn["host"], user=conn["user"], passwd=conn["password"], db=conn["database"], port=conn["port"], charset="utf8")
        cursor = conn.cursor()
        n = cursor.execute(sql_cmd, parmas)
        for row in cursor.fetchall():
            for i in row:
                ret.append(i)
        conn.commit()
        cursor.close()
        conn.close()
    except MySQLdb.Error, e:
        ret.append(e)
    return ret

def run():
    with open('test_set.txt', 'r') as f:
        try:
            cur = _db.cursor()
            j = 1
            for line in f:
                print('Processing line: {} ({})'.format(line.rstrip(), j))
                cur.execute('SELECT recording.id FROM recording ' +
                            'WHERE recording.filename=%s',
                            (line.rstrip() + '.wav',))
                recId = cur.fetchone()[0]
                cur.execute('INSERT INTO evaluation_sets (eval_set, recordingId) ' +
                            'VALUES (\'test_set\', %s)',
                            (recId,))
                j += 1
        except MySQLdb.Error as e:
            msg = 'Error inserting utts into set.'
            log(msg, e)
            raise
        # finally commit if no exceptions
        _db.commit()

def getUserProgress(self, user, eval_set):
    """
    Returns user progress into eval_set, format:
        { "progress": 541 }
    """
    try:
        cur = self.mysql.connection.cursor()
        cur.execute('SELECT COUNT(*) FROM evaluation ' +
                    'WHERE eval_set=%s ' +
                    'AND evaluator=%s',
                    (eval_set, user))
        # COUNT(*) always returns a number, so no need for a try block here
        progress = cur.fetchone()[0]
    except MySQLError as e:
        msg = 'Error getting user progress.'
        log(msg + ' Eval_set: {}, user: {}'.format(eval_set, user), e)
        return (msg, 500)
    return (json.dumps(dict(progress=progress)), 200)

def getPossibleSets(self):
    """
    Returns possible sets, format:
        [ "set1", "set2", .. ]
    or as in client-server API.
    """
    try:
        cur = self.mysql.connection.cursor()
        cur.execute('SELECT eval_set FROM evaluation_sets ' +
                    'GROUP BY eval_set')
        sets = [x[0] for x in cur.fetchall()]
    except MySQLError as e:
        msg = 'Error getting possible sets.'
        log(msg, e)
        return (msg, 500)
    return (json.dumps(sets), 200)

def recCountBySession(self, sessionId):
    """
    Returns recording count as found in database for session. 0 on failure.

    Parameters:
        sessionId    id of session
    """
    try:
        cur = self.db.cursor()
        cur.execute('SELECT COUNT(*) FROM recording WHERE sessionId=%s', (sessionId,))
        recCnt = cur.fetchone()
        if recCnt:
            return recCnt[0]  # fetchone returns tuple
        return 0
    except MySQLdb.Error as e:
        msg = 'Error grabbing rec count for session: {}'.format(sessionId)
        log(msg, e)
        raise

def connect(self, db_config):
    """
    Connect to MySQL database

    Args:
        db_config (dict): A dictionary containing the configuration

    Returns:
        (Mysql Connector): The established MySQL connection
    """
    try:
        print 'Connecting to MySQL database:',
        self.conn = MySQLdb.connect(
            host=db_config['host'],
            port=db_config['port'],
            user=db_config['user'],
            passwd=db_config['passwd'],
            db=db_config['db'])
        print 'OK'
        self.conn.autocommit(True)
        self.cursor = self.conn.cursor()
        return self.conn
    except MySQLdb.Error as error:
        print error
        return 0

def query_database(self, query):
    """
    Perform a database query

    Args:
        query (str): The SQL query

    Returns:
        list: Mysql rows
    """
    try:
        self.cursor.execute(query)
        return self.cursor.fetchall()
    except MySQLdb.Error as err:
        # print("Failed executing query: {}".format(err))
        return 0
    except MySQLdb.Warning as wrn:
        return 0

def __execute_query(self, sql):
    with warnings.catch_warnings():
        warnings.simplefilter('error', MySQLdb.Warning)
        try:
            self.__cur.execute(sql)
            return True
        # ProgrammingError is a subclass of MySQLdb.Error, so it must be caught first
        except MySQLdb.ProgrammingError, e:
            print "A ProgrammingError occurred running query. %s" % e
            exit(1)
            return False
        except MySQLdb.Error, e:
            print "An Error occurred running query. %s" % e
            #print sql
            return False
        except MySQLdb.Warning, e:
            print "A Warning occurred running query. %s" % e
            return True