我们从 Python 开源项目中,提取了以下 50 个代码示例,用于说明如何使用 sqlalchemy.exc.SQLAlchemyError()。
def get_metrics(self, unit_type, sub_type):
    """
    Handler to get supported metrics for a unit_type.

    :param unit_type: unit type the metrics belong to (used for logging only)
    :param sub_type: sub type substituted into the metrics query
    :return: JSON object of metrics on success, an error string on failure
    """
    log.info("Request received to get supported metrics for unit_type %s and subtype %s"
             % (unit_type, sub_type))
    conn = sql_helper.engine.connect()
    sql_query = query_list.GET_METRICS
    # Fixed precedence bug: `sql_query % '"'+params+'"'` evaluated as
    # (sql_query % '"') + params + '"', substituting only a lone quote into
    # the query and appending the sub type after it.
    final_query = sql_query % ('"' + sub_type + '"')
    try:
        result = conn.execute(final_query)
    except SQLAlchemyError:
        # Original message contained a line-continuation inside the string
        # literal, embedding a run of spaces; normalized here.
        return "Unable to get the metrics list because of database exception"
    log.info("Request to get supported metrics for %s successfully processed"
             % sub_type)
    return json.dumps(dict(result.fetchall()))
async def consumer(self):
    """Consume grouped events from the queue and persist them transactionally.

    Runs forever: each queue item is a (source, events, last_id) triple.
    Events (if any) are written and the source's last-seen id updated inside
    one transaction; database errors are logged and the item is still marked
    done so the queue does not stall.

    Note: restored the `async` keyword — the body awaits, so a plain `def`
    would be a syntax error.
    """
    self.logger.info(f"{self.name}: consumer coroutine started!")
    while True:
        source, events, last_id = await self.queue.get()
        self.logger.info(f"{self.name}: got group of events from queue")
        try:
            with self.sql.transaction() as trans:
                if events is not None:
                    await self.write(trans, source, events)
                await self.update(trans, source, last_id)
        except SQLAlchemyError:
            self.logger.error(f"{self.name}: error during event write",
                              exc_info=1)
        self.queue.task_done()
def get_pdns_record(self, dn=None, ip=None, start='2016-01-01', end=None):
    """Query passive-DNS records by domain name or IP within a time window.

    :param dn: domain-name substring matched against the query column
    :param ip: IP substring matched against the answer column
    :param start: inclusive lower bound for first_seen
    :param end: upper bound date for last_seen; defaults to today per call
    :return: query of matching Pdns_records, or None on error / no criteria
    """
    # Fixed: a default of `end=datetime.date.today()` is evaluated once at
    # definition time and goes stale; resolve it at call time instead.
    if end is None:
        end = datetime.date.today()
    session = self.Session()
    # Fixed: `result` was unbound when neither dn nor ip was supplied.
    result = None
    try:
        if dn:
            result = session.query(Pdns_records).filter(
                Pdns_records.query.like('%{}%'.format(dn)),
                Pdns_records.first_seen >= start,
                Pdns_records.last_seen <= '{} 23:59:59'.format(end))
        elif ip:
            # Fixed: the pattern was '%P{%'.format(ip), which raises
            # ValueError ("Single '{' encountered") and never used ip.
            result = session.query(Pdns_records).filter(
                Pdns_records.answer.like('%{}%'.format(ip)),
                Pdns_records.first_seen >= start,
                Pdns_records.last_seen <= '{} 23:59:59'.format(end))
        return result
    except SQLAlchemyError as e:
        logging.info((str(e)))
    finally:
        session.close()
def gen_commit_deco(DBSession):
    # Decorator factory: wraps a function so that the OUTERMOST decorated
    # call in the current context flushes/commits the scoped DBSession when
    # it finishes, rolling back on SQLAlchemyError and always closing the
    # session. Nested decorated calls skip the bookkeeping entirely.
    def wrap(func):
        @functools.wraps(func)
        def wrapper(*args, **kwargs):
            # Re-entrancy guard: the flag lives on db_ctx (presumably a
            # thread/greenlet-local — TODO confirm); only the call that
            # first sets it performs commit/close afterwards.
            register_db_commit = getattr(db_ctx, 'register_db_commit', False)
            if not register_db_commit:
                db_ctx.register_db_commit = True
            result = func(*args, **kwargs)
            if not register_db_commit:
                try:
                    DBSession().flush()
                    DBSession().commit()
                except SQLAlchemyError:
                    # Undo the failed transaction, then propagate.
                    DBSession().rollback()
                    raise
                finally:
                    DBSession().close()
                    # Clear the flag so the next top-level call starts a
                    # fresh commit cycle.
                    delattr(db_ctx, 'register_db_commit')
            return result
        return wrapper
    return wrap
def __init__(self, db_url):
    """
    Initialize the Peekaboo database handler.

    :param db_url: An RFC 1738 URL that points to the database.
    :raises PeekabooDatabaseError: if the initial connection attempt fails.
    """
    self.__engine = create_engine(db_url)
    self.__db_con = None
    session_factory = sessionmaker(bind=self.__engine)
    # scoped_session hands each thread its own session instance.
    self.__Session = scoped_session(session_factory)
    self.__lock = threading.RLock()
    try:
        self.__db_con = self.__engine.connect()
    except SQLAlchemyError as e:
        raise PeekabooDatabaseError(
            'Unable to connect to the database: %s' % e
        )
    # Presence of the '_meta' table marks an already-initialized schema:
    # create it on first run, otherwise purge stale 'inProgress' leftovers
    # from a previous (possibly crashed) run.
    if not self.__db_con.dialect.has_table(self.__engine, '_meta'):
        self._init_db()
        logger.debug('Database schema created.')
    else:
        self.clear_in_progress()
def clear_in_progress(self):
    """
    Remove all samples with the result 'inProgress'.

    Deletes the analysis-journal rows of every sample still marked
    'inProgress' (leftovers of an interrupted run) and commits in one go.

    :raises PeekabooDatabaseError: if the commit fails (rolled back first).
    """
    session = self.__Session()
    # Resolve the AnalysisResult row that represents 'inProgress'.
    in_progress = PeekabooDatabase.__get(
        session, AnalysisResult, name='inProgress'
    )
    in_progress_samples = session.query(SampleInfo).filter_by(
        result=in_progress
    ).all()
    for in_progress_sample in in_progress_samples:
        # Drop journal entries referencing the stale sample.
        session.query(AnalysisJournal).filter_by(
            sample=in_progress_sample
        ).delete()
    try:
        session.commit()
        logger.debug('Cleared the database from "inProgress" entries.')
    except SQLAlchemyError as e:
        session.rollback()
        raise PeekabooDatabaseError(
            'Unable to clear the database from "inProgress" entries: %s' % e
        )
    finally:
        session.close()
def delete_search(search_id):
    """Delete one of the current user's saved searches; answer as JSON."""
    if current_user.is_anonymous:
        return json_failed("You need to be logged in to delete a search.")
    search = SavedSearch.query.filter_by(id=search_id).first()
    # Refuse unknown ids and searches owned by somebody else alike.
    if search is None or search.owner != current_user:
        return json_failed('Invalid search id. Either the given search id does'
                           ' not exist, or it does not belong to you.')
    try:
        db.session.delete(search)
        db.session.commit()
        return jsonify({
            'status': 'success',
            'search_id': search.id,
            'search_terms': search.search_terms
        })
    except exc.SQLAlchemyError:
        # TODO log this
        return json_failed('Something went wrong while deleting your search '
                           'from our database.')
def delete_email_address():
    """Drop the stored email address and revoke the Facebook email permission."""
    if current_user.is_anonymous:
        flash('You need to be logged in to do that')
        return redirect(url_for('index'))
    try:
        current_user.email = None
        db.session.commit()
    except exc.SQLAlchemyError:
        # TODO log this
        flash('Something went wrong while deleting your email from our database.')
        db.session.rollback()
    oauth = OAuthSignIn.get_provider('facebook')
    # Strip out the 'facebook$' at the start of the id
    fb_user_id = re.findall('\d+', current_user.social_id)[0]
    if not oauth.revoke_email_permission(fb_user_id):
        flash('There was a problem giving up the permission to access your email address. '
              'It may be re-added to your account here the next time you sign in. '
              'To permanently remove it, please use your privacy settings in Facebook.')
    return redirect(url_for('index'))
def import_channel_data(self):
    """Import every content model's data, committing once at the very end.

    On any database error the whole transaction is rolled back and the
    exception re-raised so the caller sees the failure.
    """
    pending_rows = 0
    try:
        for model in self.content_models:
            mapping = self.schema_mapping.get(model, {})
            per_row = self.generate_row_mapper(mapping.get('per_row'))
            per_table = self.generate_table_mapper(mapping.get('per_table'))
            logging.info('Importing {model} data'.format(model=model.__name__))
            pending_rows = self.table_import(model, per_row, per_table,
                                             pending_rows)
        self.destination.session.commit()
    except SQLAlchemyError as e:
        # Rollback the transaction if any error occurs during the transaction
        self.destination.session.rollback()
        # Reraise the exception to prevent other errors occuring due to the non-completion
        raise e
def setup(self):
    """Read ProjectManager configuration and build the pooled DB engine."""
    self.default_TTL = CONF.ProjectManager.default_TTL
    self.default_share = CONF.ProjectManager.default_share
    cfg = CONF.ProjectManager
    try:
        self.db_engine = create_engine(cfg.db_connection,
                                       pool_size=cfg.db_pool_size,
                                       pool_recycle=cfg.db_pool_recycle,
                                       max_overflow=cfg.db_max_overflow)
    except SQLAlchemyError as ex:
        LOG.error(ex)
        raise ex
    self.configured = False
    self.keystone_manager = self.getManager("KeystoneManager")
    self.createTable()
def _removeProject(self, project, force=False):
    """Delete a project row and drop it from the in-memory map.

    :param project: project object to remove
    :param force: skip the membership check when True
    :raises SynergyError: if the project is unknown or the delete fails
    """
    if not force:
        if project.getId() not in self.projects.keys():
            raise SynergyError("project %s not found!" % project.getId())
    self.projects.pop(project.getId())
    connection = self.db_engine.connect()
    trans = connection.begin()
    try:
        QUERY = "delete from project where id=%s"
        connection.execute(QUERY, [project.getId()])
        trans.commit()
    except SQLAlchemyError as ex:
        trans.rollback()
        # Fixed: exceptions have no .message attribute on Python 3;
        # str(ex) works on both 2 and 3.
        raise SynergyError(str(ex))
    finally:
        connection.close()
    LOG.info("removed project %r" % project.getName())
    self.notify(event_type="PROJECT_REMOVED", project=project)
def getUsage(self, prj_id):
    """Return how many queued items belong to the given project.

    :param prj_id: project id to count items for
    :raises SynergyError: on any database error
    """
    result = 0
    connection = self.db_engine.connect()
    try:
        QUERY = "select count(*) from `%s` " % self.getName()
        QUERY += "where prj_id=%s"
        qresult = connection.execute(QUERY, [prj_id])
        row = qresult.fetchone()
        result = row[0]
    except SQLAlchemyError as ex:
        # Fixed: ex.message does not exist on Python 3; use str(ex).
        raise SynergyError(str(ex))
    finally:
        connection.close()
    return result
def _createTable(self):
    """Create this queue's backing InnoDB table if it does not yet exist.

    :raises SynergyError: on any failure executing the DDL
    """
    if not self.db_engine:
        return
    TABLE = """CREATE TABLE IF NOT EXISTS `%s` (`id` BIGINT NOT NULL \
AUTO_INCREMENT PRIMARY KEY, `priority` INT DEFAULT 0, user_id CHAR(40) \
NOT NULL, prj_id CHAR(40) NOT NULL, `retry_count` INT DEFAULT 0, \
`creation_time` TIMESTAMP NOT NULL DEFAULT CURRENT_TIMESTAMP, `last_update` \
TIMESTAMP NULL, `data` TEXT NOT NULL) ENGINE=InnoDB""" % self.getName()
    connection = self.db_engine.connect()
    try:
        connection.execute(TABLE)
    except Exception as ex:
        # The original had separate SQLAlchemyError and Exception branches
        # with identical bodies; one branch covers both.  str(ex) replaces
        # the Python-2-only ex.message.
        raise SynergyError(str(ex))
    finally:
        connection.close()
def _getItemDataDB(self, item):
    """Load the JSON payload stored for a queue item and attach it.

    :param item: queue item; ignored when falsy or when no engine exists
    :return: the decoded payload (also set on the item), or None
    :raises SynergyError: on any database error
    """
    if not item or not self.db_engine:
        return
    data = None
    connection = self.db_engine.connect()
    try:
        QUERY = "select data from `%s`" % self.getName()
        QUERY += " where id=%s"
        result = connection.execute(QUERY, [item.getId()])
        row = result.fetchone()
        # Fixed: a missing row made json.loads(row[0]) raise TypeError on
        # the None subscript; treat it as "no data" instead.
        if row is not None:
            data = json.loads(row[0])
    except SQLAlchemyError as ex:
        # str(ex): exceptions have no .message attribute on Python 3.
        raise SynergyError(str(ex))
    finally:
        connection.close()
    item.setData(data)
    return data
def confirm_registration(username):
    """Confirm a pending registration when the posted code matches.

    Returns the serialized user on success; 400 for an unknown user or a
    bad/malformed code; 500 when the database commit fails.
    """
    user = User.query.filter_by(username=username).first()
    secret_code = request.json.get('registrationCode')
    if user is None:
        abort(400)
    # Fixed: a missing or non-numeric code previously raised
    # TypeError/ValueError (-> 500); treat it as a bad request instead.
    try:
        codes_match = int(secret_code) == int(user.registration_code)
    except (TypeError, ValueError):
        codes_match = False
    if codes_match:
        try:
            user.registration_confirmed = True
            db.session.add(user)
            db.session.commit()
        except SQLAlchemyError as ex:
            app.logger.info(
                'Could not commit user to database: {0}'.format(ex)
            )
            abort(500)
        return jsonify(user.serialize)
    else:
        app.logger.info(
            'User {0}: Secret code did not match.'.format(user.username)
        )
        abort(400)
def get_latest_analysis_by_hash(algorithm, artifact_hash, projection=None):
    """Note: has to be called inside flask request context."""
    if algorithm not in ('sha1', 'sha256', 'md5'):
        return None
    contains_dict = {'details': [{"artifact": True, algorithm: artifact_hash}]}
    try:
        query = (rdb.session.query(Analysis)
                 .join(WorkerResult)
                 .filter(WorkerResult.worker == 'digests')
                 .filter(WorkerResult.task_result.contains(contains_dict))
                 .order_by(Analysis.started_at.desc()))
        return query.first()
    except SQLAlchemyError:
        rdb.session.rollback()
        raise
def retrieve_worker_results(rdb, external_request_id):
    """Fetch every WorkerResult row recorded for an external request id."""
    start = datetime.datetime.now()
    try:
        results = (rdb.session.query(WorkerResult)
                   .filter(WorkerResult.external_request_id == external_request_id)
                   .all())
    except (NoResultFound, MultipleResultsFound):
        return None
    except SQLAlchemyError:
        rdb.session.rollback()
        raise
    elapsed_seconds = (datetime.datetime.now() - start).total_seconds()
    msg = "It took {t} seconds to retrieve " \
          "all worker results for {r}.".format(t=elapsed_seconds,
                                               r=external_request_id)
    current_app.logger.debug(msg)
    return results
def retrieve_worker_result(rdb, external_request_id, worker):
    """Fetch the single WorkerResult row for one worker of a request."""
    start = datetime.datetime.now()
    try:
        result = (rdb.session.query(WorkerResult)
                  .filter(WorkerResult.external_request_id == external_request_id,
                          WorkerResult.worker == worker)
                  .one())
    except (NoResultFound, MultipleResultsFound):
        return None
    except SQLAlchemyError:
        rdb.session.rollback()
        raise
    result_dict = result.to_dict()
    elapsed_seconds = (datetime.datetime.now() - start).total_seconds()
    msg = "It took {t} seconds to retrieve {w} " \
          "worker results for {r}.".format(t=elapsed_seconds, w=worker,
                                           r=external_request_id)
    current_app.logger.debug(msg)
    return result_dict
def spike_init():
    """Create the Spike schema and seed the default rule/whitelist sets."""
    logging.info("Initializing Spike")
    timestamp = int(time())
    app = create_app(__get_config_file())
    db.init_app(app)
    with app.app_context():
        db.create_all()
        for r in rulesets_seeds:
            logging.info("Adding ruleset: %s", r)
            rmks = "Ruleset for %s / auto-created %s" % (r, strftime("%F - %H:%M", localtime(time())))
            db.session.add(NaxsiRuleSets(r, rmks, timestamp))
        for w in whitelists_seeds:
            logging.info("Adding whitelistset: %s", w)
            rmks = "Ruleset for %s / auto-created %s" % (w, strftime("%F - %H:%M", localtime(time())))
            db.session.add(NaxsiWhitelistSets(w, rmks, timestamp))
        try:
            db.session.commit()
        except SQLAlchemyError:
            # Commit failure almost certainly means the seed rows exist.
            logging.error('It seems that the database was already initialized. Did you meant to run `%s run` instead?', sys.argv[0])
    logging.info('Spike initialization completed')
def display_edit(algorithm_id):
    """Render and process the edit form for an existing algorithm."""
    algorithm = Algorithm.query.filter_by(id=algorithm_id).first()
    form = CreateAlgorithmForm(request.form)
    if request.method == 'POST' and form.validate():
        new_name = form.name.data
        new_code = form.code.data
        current_app.logger.info('Edit a algorithm %s.', (new_name))
        algorithm.name = new_name
        algorithm.code = new_code
        try:
            db.session.commit()
            flash('Algorithm successfully edited.')
            return redirect(url_for('setting.display_index'))
        except exc.SQLAlchemyError as e:
            flash('Algorithm was not edited.')
            current_app.logger.error(e)
    # Pre-fill the form from the stored record (also restores the shown
    # values after a failed commit).
    form.name.data = algorithm.name
    form.code.data = algorithm.code
    return render_template("setting_edit_algorithm.html", form=form)
def display_new_algorithm():
    """Render and process the creation form for a new algorithm."""
    form = CreateAlgorithmForm(request.form)
    if request.method == 'POST' and form.validate():
        algo_name = form.name.data
        algo_code = form.code.data
        current_app.logger.info('Adding a new algorithm %s.', (algo_name))
        algorithm = Algorithm(algo_name, current_user.id, code=algo_code)
        try:
            db.session.add(algorithm)
            db.session.commit()
            flash('Algorithm successfully created.')
            return redirect(url_for('setting.display_index'))
        except exc.SQLAlchemyError as e:
            flash('Algorithm was not created.')
            current_app.logger.error(e)
    return render_template('setting_new_algorithm.html', form=form)
def fetch(self, lock=True, machine=""):
    """Fetches a task waiting to be processed and locks it for running.
    @return: None or task
    """
    session = self.Session()
    try:
        # Build the pending-task query once; the machine constraint is the
        # only difference between the two original branches.
        query = session.query(Task).filter_by(status=TASK_PENDING)
        if machine != "":
            query = query.filter_by(machine=machine)
        row = query.order_by("priority desc, added_on").first()
        if not row:
            return None
        if lock:
            self.set_status(task_id=row.id, status=TASK_RUNNING)
            session.refresh(row)
        return row
    except SQLAlchemyError as e:
        log.debug("Database error fetching task: {0}".format(e))
        session.rollback()
    finally:
        session.close()
def guest_start(self, task_id, name, label, manager):
    """Logs guest start.
    @param task_id: task identifier
    @param name: vm name
    @param label: vm label
    @param manager: vm manager
    @return: guest row id, or None on database error
    """
    session = self.Session()
    guest = Guest(name, label, manager)
    try:
        session.query(Task).get(task_id).guest = guest
        session.commit()
        session.refresh(guest)
    except SQLAlchemyError as e:
        log.debug("Database error logging guest start: {0}".format(e))
        session.rollback()
        return None
    else:
        return guest.id
    finally:
        session.close()
def guest_stop(self, guest_id):
    """Logs guest stop.
    @param guest_id: guest log entry id
    """
    session = self.Session()
    try:
        session.query(Guest).get(guest_id).shutdown_on = datetime.now()
        session.commit()
    except SQLAlchemyError as e:
        log.debug("Database error logging guest stop: {0}".format(e))
        session.rollback()
    except TypeError:
        log.warning("Data inconsistency in guests table detected, it might be a crash leftover. Continue")
        session.rollback()
    finally:
        session.close()
def get_file_types(self):
    """Get sample filetypes
    @return: A sorted list of all available file types (0 on database error)
    """
    session = self.Session()
    try:
        distinct = session.query(Sample.file_type).group_by(Sample.file_type)
        res = sorted(row[0] for row in distinct.all())
    except SQLAlchemyError as e:
        log.debug("Database error getting file_types: {0}".format(e))
        return 0
    finally:
        session.close()
    return res
def view_task(self, task_id, details=False):
    """Retrieve information on a task.
    @param task_id: ID of the task to query.
    @param details: eagerly load the guest/errors/tags relations when True.
    @return: details on the task, or None on database error.
    """
    session = self.Session()
    try:
        query = session.query(Task)
        if details:
            query = query.options(joinedload("guest"), joinedload("errors"),
                                  joinedload("tags"))
        task = query.get(task_id)
    except SQLAlchemyError as e:
        log.debug("Database error viewing task: {0}".format(e))
        return None
    else:
        # Detach so the object remains usable after the session closes.
        if task:
            session.expunge(task)
        return task
    finally:
        session.close()
def delete_task(self, task_id):
    """Delete information on a task.
    @param task_id: ID of the task to query.
    @return: operation status.
    """
    session = self.Session()
    try:
        task = session.query(Task).get(task_id)
        # Fixed: for an unknown id get() returns None and session.delete(None)
        # raises; report the failure explicitly instead of relying on the
        # exception path.
        if task is None:
            return False
        session.delete(task)
        session.commit()
    except SQLAlchemyError as e:
        log.debug("Database error deleting task: {0}".format(e))
        session.rollback()
        return False
    finally:
        session.close()
    return True
def view_sample(self, sample_id):
    """Retrieve information on a sample given a sample id.
    @param sample_id: ID of the sample to query.
    @return: details on the sample, or None on error.
    """
    session = self.Session()
    try:
        sample = session.query(Sample).get(sample_id)
    except AttributeError:
        return None
    except SQLAlchemyError as e:
        log.debug("Database error viewing task: {0}".format(e))
        return None
    else:
        # Detach so the object survives the session being closed.
        if sample:
            session.expunge(sample)
    finally:
        session.close()
    return sample
def find_sample(self, md5=None, sha1=None, sha256=None):
    """Search samples by MD5, SHA1, or SHA256.
    @param md5: md5 string
    @param sha1: sha1 string
    @param sha256: sha256 string
    @return: matching sample, or None
    """
    session = self.Session()
    # Fixed: `sample` was unbound (UnboundLocalError) when no hash argument
    # was supplied; default to "not found".
    sample = None
    try:
        if md5:
            sample = session.query(Sample).filter_by(md5=md5).first()
        elif sha1:
            sample = session.query(Sample).filter_by(sha1=sha1).first()
        elif sha256:
            sample = session.query(Sample).filter_by(sha256=sha256).first()
    except SQLAlchemyError as e:
        log.debug("Database error searching sample: {0}".format(e))
        return None
    else:
        if sample:
            session.expunge(sample)
    finally:
        session.close()
    return sample
def prune_target(engine, target_name, target_type):
    """Delete an inactive target; fall back to marking it inactive if it
    still owns data (messages etc.) that must be preserved."""
    if target_type == 'user':
        metrics.incr('users_purged')
    else:
        metrics.incr('others_purged')
    try:
        engine.execute('''DELETE FROM `target` WHERE `name` = %s
                          AND `type_id` = (SELECT `id` FROM `target_type` WHERE `name` = %s)''',
                       (target_name, target_type))
        logger.info('Deleted inactive target %s', target_name)
    except IntegrityError:
        # The user has messages or some other user data which should be
        # preserved. Just mark as inactive.
        logger.info('Marking target %s inactive', target_name)
        engine.execute('''UPDATE `target` SET `active` = FALSE WHERE `name` = %s
                          AND `type_id` = (SELECT `id` FROM `target_type` WHERE `name` = %s)''',
                       (target_name, target_type))
    except SQLAlchemyError as e:
        logger.error('Deleting target %s failed: %s', target_name, e)
        metrics.incr('sql_errors')
def api_people(entity=None):
    """
    api route for people
    :param entity: is the id of a person or None
    :return: json with results or 404
    """
    try:
        person_id = request.args.get('id')
        if entity is None and person_id is None:
            return get_all_from_category(Person)
        lookup = entity if entity is not None else person_id
        data = db.session.query(Person).filter_by(person_id=lookup).one()
        return json.dumps(data.dictionary())
    except SQLAlchemyError:
        print("Get people failed")
        abort(404)
def api_companies(entity=None):
    """
    api route for companies
    :param entity: is the id of a company or None
    :return: json with results or 404
    """
    try:
        company_id = request.args.get('id')
        if entity is None and company_id is None:
            return get_all_from_category(Company)
        lookup = entity if entity is not None else company_id
        data = db.session.query(Company).filter_by(company_id=lookup).one()
        return json.dumps(data.dictionary())
    except SQLAlchemyError:
        print("Get companies failed")
        abort(404)
def api_financialorgs(entity=None):
    """
    api route for financial org
    :param entity: is the id of a financial org or None
    :return: json with results or 404
    """
    try:
        finorg_id = request.args.get('id')
        if entity is None and finorg_id is None:
            return get_all_from_category(FinancialOrg)
        lookup = entity if entity is not None else finorg_id
        data = db.session.query(FinancialOrg).filter_by(
            financial_org_id=lookup).one()
        return json.dumps(data.dictionary())
    except SQLAlchemyError:
        print("Get financial orgs failed")
        abort(404)
def api_cities(entity=None):
    """
    api route for cities
    :param entity: is the id of a city or None
    :return: json with results or 404
    """
    try:
        city_id = request.args.get('id')
        if entity is None and city_id is None:
            return get_all_from_category(City)
        lookup = entity if entity is not None else city_id
        data = db.session.query(City).filter_by(city_id=lookup).one()
        return json.dumps(data.dictionary())
    except SQLAlchemyError:
        print("Get cities failed")
        abort(404)
def get_analysis_count(self, ecosystem, package, version):
    """Get count of previously scheduled analysis for given EPV triplet.

    :param ecosystem: str, Ecosystem name
    :param package: str, Package name
    :param version: str, Package version
    :return: analysis count
    """
    if ecosystem == 'maven':
        package = MavenCoordinates.normalize_str(package)
    try:
        query = (PostgresBase.session.query(Analysis)
                 .join(Version).join(Package).join(Ecosystem)
                 .filter(Ecosystem.name == ecosystem)
                 .filter(Package.name == package)
                 .filter(Version.identifier == version))
        return query.count()
    except SQLAlchemyError:
        PostgresBase.session.rollback()
        raise
def get_finished_task_names(analysis_id):
    """Get name of tasks that finished in Analysis.

    :param analysis_id: analysis id for which task names should retrieved
    :return: a list of task names
    """
    try:
        rows = (PostgresBase.session.query(WorkerResult.worker)
                .join(Analysis)
                .filter(Analysis.id == analysis_id)
                .filter(WorkerResult.error.is_(False))
                .all())
    except SQLAlchemyError:
        PostgresBase.session.rollback()
        raise
    # Each row is a one-column tuple; flatten to a plain list of names.
    return list(chain(*rows))
def get_analysed_versions(ecosystem, package):
    """Return all already analysed versions for the given package.

    :param ecosystem: str, Ecosystem name
    :param package: str, Package name
    :return: iterable of version identifiers of already analysed versions
    """
    try:
        rows = (PostgresBase.session.query(Version.identifier)
                .join(Analysis).join(Package).join(Ecosystem)
                .filter(Ecosystem.name == ecosystem)
                .filter(Package.name == package)
                .filter(Analysis.finished_at.isnot(None))
                .distinct().all())
        return chain(*rows)
    except SQLAlchemyError:
        PostgresBase.session.rollback()
        raise
def get_analysis_by_id(self, analysis_id):
    """Get result of previously scheduled analysis

    :param analysis_id: str, ID of analysis
    :return: analysis result
    """
    try:
        return (PostgresBase.session.query(PackageAnalysis)
                .filter(PackageAnalysis.id == analysis_id)
                .one())
    except (NoResultFound, MultipleResultsFound):
        # Lookup failures propagate untouched; only generic database
        # errors trigger a rollback below.
        raise
    except SQLAlchemyError:
        PostgresBase.session.rollback()
        raise
def get_analysis_count(self, ecosystem, package):
    """Get count of previously scheduled analyses for given ecosystem-package.

    :param ecosystem: str, Ecosystem name
    :param package: str, Package name
    :return: analysis count
    """
    if ecosystem == 'maven':
        package = MavenCoordinates.normalize_str(package)
    try:
        query = (PostgresBase.session.query(PackageAnalysis)
                 .join(Package).join(Ecosystem)
                 .filter(Ecosystem.name == ecosystem)
                 .filter(Package.name == package))
        return query.count()
    except SQLAlchemyError:
        PostgresBase.session.rollback()
        raise
def get_finished_task_names(analysis_id):
    """Get name of tasks that finished in Analysis.

    :param analysis_id: analysis id for which task names should retrieved
    :return: a list of task names
    """
    try:
        rows = (PostgresBase.session.query(PackageWorkerResult.worker)
                .join(PackageAnalysis)
                .filter(PackageAnalysis.id == analysis_id)
                .filter(PackageWorkerResult.error.is_(False))
                .all())
    except SQLAlchemyError:
        PostgresBase.session.rollback()
        raise
    # Flatten the one-column result tuples into a plain list.
    return list(chain(*rows))
def do_run(self, arguments, s3, postgres, results):
    """Offload raw worker results to S3, keeping only version references
    in the database, then store the base file record on S3.

    :param arguments: flow arguments used to address the S3 objects
    :param s3: S3 storage adapter
    :param postgres: Postgres storage adapter whose session tracks the
        worker-result rows being modified
    :param results: object carrying raw_analyses (worker result rows)
    """
    for worker_result in results.raw_analyses:
        # We don't want to store tasks that do book-keeping for Selinon's
        # Dispatcher (starting uppercase)
        if worker_result.worker[0].isupper():
            continue
        if not postgres.is_real_task_result(worker_result.task_result):
            # Do not overwrite results stored on S3 with references to
            # their version - this can occur on selective task runs.
            continue
        version_id = s3.store_task_result(arguments, worker_result.worker,
                                          worker_result.task_result)
        # Substitute task's result with version that we got on S3
        worker_result.task_result = {'version_id': version_id}
        try:
            # Commit right after the S3 upload so DB and S3 stay in step.
            # NOTE(review): collapsed source is ambiguous about whether the
            # commit sits inside or after the loop — confirm upstream.
            postgres.session.commit()
        except SQLAlchemyError:
            postgres.session.rollback()
            raise
    s3.store_base_file_record(arguments, results.to_dict())
def execute(self, arguments):
    """Look up the stack-analysis request row and return its manifest list."""
    self._strict_assert(arguments.get('external_request_id'))
    postgres = StoragePool.get_connected_storage('BayesianPostgres')
    try:
        request_row = (postgres.session.query(StackAnalysisRequest)
                       .filter(StackAnalysisRequest.id == arguments.get('external_request_id'))
                       .first())
    except SQLAlchemyError:
        postgres.session.rollback()
        raise
    manifests = []
    if request_row is not None:
        request_json = request_row.to_dict().get("requestJson", {})
        manifests = request_json.get('manifest', [])
    return {'manifest': manifests}
def run(self, arguments):
    """Stamp the Analysis row as finished and record its EPV release string."""
    self._strict_assert(arguments.get('document_id'))
    try:
        record = (self.storage.session.query(Analysis)
                  .filter(Analysis.id == arguments['document_id'])
                  .one())
        record.finished_at = json_serial(datetime.datetime.utcnow())
        record.release = '{}:{}:{}'.format(arguments.get('ecosystem'),
                                           arguments.get('name'),
                                           arguments.get('version'))
        self.storage.session.commit()
    except SQLAlchemyError:
        self.storage.session.rollback()
        raise
    # Commented out for now since we want to sync to S3
    # if self.task_name.endswith('Error'):
    #     raise RuntimeError("Flow %s failed" % self.flow_name)
def get_package_dependents_count(ecosystem_backend, package, db_session=None):
    """Get number of GitHub projects dependent on the `package`.

    :param ecosystem_backend: str, Ecosystem backend from `f8a_worker.enums.EcosystemBackend`
    :param package: str, Package name
    :param db_session: obj, Database session to use for querying
    :return: number of dependent projects, or -1 if the information is not available
    """
    if not db_session:
        db_session = StoragePool.get_connected_storage("BayesianPostgres").session
    try:
        row = (db_session.query(PackageGHUsage.count)
               .filter(PackageGHUsage.name == package)
               .filter(PackageGHUsage.ecosystem_backend == ecosystem_backend)
               .order_by(desc(PackageGHUsage.timestamp))
               .first())
    except SQLAlchemyError:
        db_session.rollback()
        raise
    return row[0] if row else -1
def get_latest_analysis(ecosystem, package, version, db_session=None):
    """Get latest analysis for the given EPV.

    :param ecosystem: str, Ecosystem name
    :param package: str, Package name
    :param version: str, Package version
    :param db_session: database session to use; a connected BayesianPostgres
        session is obtained when omitted
    :return: the most recently started Analysis, or None
    """
    if not db_session:
        storage = StoragePool.get_connected_storage("BayesianPostgres")
        db_session = storage.session
    try:
        # Fixed: filtering on Ecosystem/Package/Version columns without
        # joining those tables produces an implicit cross join and can match
        # unrelated rows; join explicitly like get_analysis_count does.
        return db_session.query(Analysis).\
            join(Version).join(Package).join(Ecosystem).\
            filter(Ecosystem.name == ecosystem).\
            filter(Package.name == package).\
            filter(Version.identifier == version).\
            order_by(Analysis.started_at.desc()).\
            first()
    except SQLAlchemyError:
        db_session.rollback()
        raise
def _edit_config(self, form_data):
    """Create or update a Runtime Config from submitted form data.

    :param form_data: mapping with cid, name, src, dst, excl and opts fields
    :return: True on success, False if the database commit failed
    """
    # Fixed: `is not u''` compared object identity, not equality — whether
    # it ever held for a submitted empty string was an interpreter detail.
    if form_data['cid'] != u'':
        c = Config.query.get(int(form_data['cid']))
        c.name = str(form_data['name'])
    else:
        # adding new config
        c = Config(name=form_data['name'])
        c.plugin = Plugin.query.filter(Plugin.name == self.name).first()
        c.role = "Runtime"
    # Replace args wholesale with the submitted values.
    c.args = dict()
    c.args['src'] = form_data['src']
    c.args['dst'] = form_data['dst']
    c.args['excl'] = form_data['excl']
    c.args['opts'] = form_data['opts']
    # commit it.
    try:
        if c.id is None:
            db.session.add(c)
        db.session.commit()
    except SQLAlchemyError as e:
        # Fixed: exceptions have no .message attribute on Python 3.
        self.logger.error(str(e))
        return False
    return True
def _make_realtime(self, form_data):
    """Toggle the realtime-watch ('mkrt') flag of a Config and sync watches.

    :param form_data: mapping with conf_id and an optional enabled flag
    :return: True on success, False for an unknown config or failed commit
    """
    self.logger.debug(form_data)
    # Renamed from `id`, which shadowed the builtin.
    conf_id = int(form_data['conf_id'])
    c = Config.query.get(conf_id)
    if c is None:
        return False
    # Deep-copy then reassign args — presumably so SQLAlchemy detects the
    # change on the mapped attribute; in-place mutation may go unnoticed.
    new_args = copy.deepcopy(c.args)
    if form_data.get('enabled', False):
        new_args['mkrt'] = True
        self.add_watch(new_args)
    else:
        new_args['mkrt'] = False
        self.remove_watch(new_args)
    c.args = new_args
    try:
        db.session.commit()
    except SQLAlchemyError as e:
        # Fixed: exceptions have no .message attribute on Python 3.
        self.logger.error(str(e))
        return False
    return True
def delete_tag(self, tag_name, sha256):
    """Detach a tag from a sample; drop the tag entirely if now unused.

    :param tag_name: tag to remove
    :param sha256: sha256 of the sample to detach the tag from
    """
    session = self.Session()
    try:
        # First remove the tag from the sample (the duplicate second query
        # for the same malware row was removed).
        malware_entry = session.query(Malware).filter(Malware.sha256 == sha256).first()
        tag = session.query(Tag).filter(Tag.tag == tag_name).first()
        try:
            # AttributeError: sample not found; ValueError: tag not attached.
            # The original bare `except:` silently hid every other failure.
            malware_entry.tag.remove(tag)
            session.commit()
        except (AttributeError, ValueError):
            print_error("Tag {0} does not exist for this sample".format(tag_name))
        # If tag has no entries drop it
        count = len(self.find('tag', tag_name))
        if count == 0:
            session.delete(tag)
            session.commit()
            print_warning("Tag {0} has no additional entries dropping from Database".format(tag_name))
    except SQLAlchemyError as e:
        print_error("Unable to delete tag: {0}".format(e))
        session.rollback()
    finally:
        session.close()
def rename(self, id, name):
    """Rename the stored file identified by ``id``; return success flag."""
    session = self.Session()
    if not name:
        return False
    try:
        malware = session.query(Malware).get(id)
        if not malware:
            print_error("The opened file doesn't appear to be in the database, have you stored it yet?")
            return False
        malware.name = name
        session.commit()
        return True
    except SQLAlchemyError as e:
        print_error("Unable to rename file: {}".format(e))
        session.rollback()
        return False
    finally:
        session.close()
def delete_file(self, id):
    """Remove the Malware row identified by ``id``; return success flag."""
    session = self.Session()
    try:
        malware = session.query(Malware).get(id)
        if not malware:
            print_error("The opened file doesn't appear to be in the database, have you stored it yet?")
            return False
        session.delete(malware)
        session.commit()
        return True
    except SQLAlchemyError as e:
        print_error("Unable to delete file: {0}".format(e))
        session.rollback()
        return False
    finally:
        session.close()