The following 15 code examples, extracted from open-source Python projects, illustrate common patterns for producing and using the built-in boolean value `True`.
def resolve_dotted_attribute(obj, attr, allow_dotted_names=True):
    """resolve_dotted_attribute(a, 'b.c.d') => a.b.c.d

    Resolve a dotted attribute name to an object, walking one attribute
    at a time.  Raises AttributeError if any attribute in the chain
    starts with a '_' (private by convention).  When allow_dotted_names
    is false, dots are not split and this behaves like getattr(obj, attr).
    """
    names = attr.split('.') if allow_dotted_names else [attr]
    for name in names:
        # Refuse to traverse into conventionally-private attributes.
        if name.startswith('_'):
            raise AttributeError(
                'attempt to access private attribute "%s"' % name
                )
        obj = getattr(obj, name)
    return obj
def check_run_id(run_id):
    '''
    @summary: Validate command line parameter 'run_id'
    @param run_id: Unique string identifying current run
    @return: True if input is correct
    '''
    # `not run_id` covers both None and the empty string; the original
    # `(run_id is None) or (len(run_id) == 0)` said the same thing verbosely.
    # print() (instead of the py2-only `print` statement) runs identically
    # on Python 2 and 3 for a single argument.
    if not run_id:
        print('ERROR: no run_id specified')
        return False
    return True
#---------------------------------------------------------------------
def __init__(self, host, client_port, mainThread, show_banner=True):
    """Set up an interactive interpreter bound to a debug client.

    @param host: host name/address of the client to connect back to
    @param client_port: port the client listens on
    @param mainThread: main thread handed to the base interface
    @param show_banner: accepted for interface compatibility; unused here
    """
    BaseInterpreterInterface.__init__(self, mainThread)
    self.host = host
    self.client_port = client_port
    self._input_error_printed = False
    # The console and its namespace share the same dict, so exec'd code
    # sees assignments made through either reference.
    self.namespace = {}
    self.interpreter = InteractiveConsole(self.namespace)
def init_mpl_in_console(interpreter):
    # Wire matplotlib GUI event-loop integration into the console:
    # installs a return-control callback for input hooks, and registers
    # import hooks that activate matplotlib/pylab/pyplot support the
    # first time those modules are imported.
    from pydev_ipython.inputhook import set_return_control_callback

    def return_control():
        ''' A function that the inputhooks can call (via inputhook.stdin_ready()) to find
            out if they should cede control and return '''
        if _ProcessExecQueueHelper._debug_hook:
            # Some of the input hooks check return control without doing
            # a single operation, so we don't return True on every
            # call when the debug hook is in place to allow the GUI to run
            # XXX: Eventually the inputhook code will have diverged enough
            # from the IPython source that it will be worthwhile rewriting
            # it rather than pretending to maintain the old API
            # Toggle the flag so control is ceded only on every other call.
            _ProcessExecQueueHelper._return_control_osc = not _ProcessExecQueueHelper._return_control_osc
            if _ProcessExecQueueHelper._return_control_osc:
                return True
        # Pending work in the exec queue always takes priority over the GUI.
        if not interpreter.exec_queue.empty():
            return True
        return False

    set_return_control_callback(return_control)

    from _pydev_bundle.pydev_import_hook import import_hook_manager
    from pydev_ipython.matplotlibtools import activate_matplotlib, activate_pylab, activate_pyplot
    import_hook_manager.add_module_name("matplotlib", lambda: activate_matplotlib(interpreter.enableGui))
    # enable_gui_function in activate_matplotlib should be called in main thread. That's why we call
    # interpreter.enableGui which put it into the interpreter's exec_queue and executes it in the main thread.
    import_hook_manager.add_module_name("pylab", activate_pylab)
    import_hook_manager.add_module_name("pyplot", activate_pyplot)
def process_exec_queue(interpreter):
    # Main console loop: drain the interpreter's exec queue forever,
    # interleaving GUI input hooks so matplotlib windows stay responsive.
    init_mpl_in_console(interpreter)
    from pydev_ipython.inputhook import get_inputhook
    while 1:
        # Running the request may have changed the inputhook in use
        inputhook = get_inputhook()
        if _ProcessExecQueueHelper._debug_hook:
            _ProcessExecQueueHelper._debug_hook()
        if inputhook:
            try:
                # Note: it'll block here until return_control returns True.
                inputhook()
            except:
                # Best-effort: a broken GUI hook must not kill the console loop.
                import traceback;traceback.print_exc()
        try:
            try:
                # Poll with a short timeout (20 calls/second) so the loop can
                # service the input hook between queue items.
                code_fragment = interpreter.exec_queue.get(block=True, timeout=1/20.)  # 20 calls/second
            except _queue.Empty:
                continue
            if hasattr(code_fragment, '__call__'):
                # It can be a callable (i.e.: something that must run in the main
                # thread can be put in the queue for later execution).
                code_fragment()
            else:
                more = interpreter.add_exec(code_fragment)
        except KeyboardInterrupt:
            # Ctrl-C cancels the current buffer but keeps the console alive.
            interpreter.buffer = None
            continue
        except SystemExit:
            raise
        except:
            # NOTE(review): shadows the builtin `type`; also relies on a
            # module-level `traceback` import — confirm it exists at file top.
            type, value, tb = sys.exc_info()
            traceback.print_exception(type, value, tb, file=sys.__stderr__)
            exit()
def exec_code(code, globals, locals, debugger):
    """Push *code* into the shared interpreter for execution.

    Updates the interpreter namespace from the given globals/locals first.
    @return: True when the fragment is incomplete and more input is
             needed; False once the code has been submitted for execution.
    """
    interface = get_interpreter()
    interface.interpreter.update(globals, locals)
    if interface.need_more(code):
        return True
    interface.add_exec(code, debugger)
    return False
def __init__(self, addr, requestHandler=SimpleXMLRPCRequestHandler,
             logRequests=True, allow_none=False, encoding=None):
    """Create an XML-RPC server listening on *addr*.

    @param addr: (host, port) tuple passed to the TCP server
    @param requestHandler: handler class for incoming requests
    @param logRequests: whether to log each request
    @param allow_none: marshal None as <nil/> in responses
    @param encoding: XML encoding for responses
    """
    self.logRequests = logRequests

    SimpleXMLRPCDispatcher.__init__(self, allow_none, encoding)
    SocketServer.TCPServer.__init__(self, addr, requestHandler)

    # [Bug #1222790] If possible, set close-on-exec flag; if a
    # method spawns a subprocess, the subprocess shouldn't have
    # the listening socket open.
    if fcntl is not None and hasattr(fcntl, 'FD_CLOEXEC'):
        fd = self.fileno()
        old_flags = fcntl.fcntl(fd, fcntl.F_GETFD)
        fcntl.fcntl(fd, fcntl.F_SETFD, old_flags | fcntl.FD_CLOEXEC)
def line_matches(self, str, match_messages_regex, ignore_messages_regex):
    '''
    @summary: This method checks whether given string matches against the
              set of regular expressions.
    @param str: string to match against 'match' and 'ignore' regex expressions.
        A string which matched to the 'match' set will be reported.
        A string which matches to 'match' set, but also matches to
        'ignore' set - will not be reported (will be ignored)
    @param match_messages_regex: regex class instance containing messages
        to match against.
    @param ignore_messages_regex: regex class instance containing messages
        to ignore match against.
    @return: True is str matches regex criteria, otherwise False.
    '''
    # Guard-clause form: bail out early on each non-reportable case.
    if match_messages_regex is None or not match_messages_regex.findall(str):
        return False
    if ignore_messages_regex is None:
        return True
    if ignore_messages_regex.findall(str):
        # Matched, but the ignore set overrides it.
        return False
    self.print_diagnostic_message('matching line: %s' % str)
    return True
#---------------------------------------------------------------------
def line_is_expected(self, str, expect_messages_regex):
    '''
    @summary: This method checks whether given string matches against the
              set of "expected" regular expressions.
    @param str: string to test.
    @param expect_messages_regex: regex class instance (or None).
    @return: True if the string matches, otherwise False.
    '''
    # A None regex never matches; bool() collapses findall's list to a flag.
    return (expect_messages_regex is not None
            and bool(expect_messages_regex.findall(str)))
def check_action(action, log_files_in, out_dir, match_files_in,
                 ignore_files_in, expect_files_in):
    '''
    @summary: This function validates command line parameter 'action'
              and other related parameters.
    @param action: either 'init' or 'analyze'
    @param out_dir: output directory, required for 'analyze'
    @param match_files_in: match-expression files, required for 'analyze'
    @return: True if input is correct
    '''
    # Early returns replace the original ret_code flag; print() (not the
    # py2-only print statement) keeps the messages working on Python 3 too.
    if action == 'init':
        return True
    if action == 'analyze':
        if not out_dir:
            print('ERROR: missing required out_dir for analyze action')
            return False
        if not match_files_in:
            print('ERROR: missing required match_files_in for analyze action')
            return False
        return True
    print('ERROR: invalid action:%s specified' % action)
    return False
#---------------------------------------------------------------------
def _verify(self, command):
    """Return True if *command* matches any allowed-command pattern.

    Patterns come from GKEHandler.allowed_commands and are matched
    case-insensitively against the start of the command string.
    """
    for pattern in GKEHandler.allowed_commands:
        if re.compile(pattern, re.IGNORECASE).match(command):
            return True
    return False
def _delete_network(self, project, cluster_name):
    # Delete the GCE network named after the cluster, then poll until the
    # network is gone (a failing GET is taken as "deleted") or the timeout
    # elapses.  Errors from the delete call itself are logged, not raised.
    try:
        resp = self.compute_service.networks().delete(
            project=project, network=cluster_name
        ).execute()
    except Exception as e:
        fmlogger.error(e)
    network_deleted = False
    count = 0
    while not network_deleted and count < GCLOUD_ACTION_TIMEOUT:
        try:
            network_obj = self.compute_service.networks().get(
                project=project, network=cluster_name
            ).execute()
        except Exception as e:
            # GET failing is interpreted as the network no longer existing.
            fmlogger.error(e)
            network_deleted = True
        else:
            # Still present; wait one second and re-check.
            time.sleep(1)
            count = count + 1
    if count >= GCLOUD_ACTION_TIMEOUT:
        # NOTE(review): `message` is built with the network name but the
        # exception is raised with a generic string — presumably `message`
        # was meant to be passed instead; confirm before changing.
        message = ("Failed to delete network {network_name}").format(network_name=cluster_name)
        raise exceptions.EnvironmentDeleteFailure("Failed to delete network ")
def create_msg_regex(self, file_lsit):
    '''
    @summary: This method reads input file containing list of regular
              expressions to be matched against.
    @param file_lsit: List of file paths, contains search expressions.
        (NOTE(review): parameter name looks like a typo of `file_list`,
        but renaming it would break keyword-argument callers.)
    @return: Tuple (regex, messages_regex): a compiled regex alternating
        all loaded expressions (or None when no expressions were loaded),
        and the raw list of expression strings.
    '''
    messages_regex = []
    if file_lsit is None or (0 == len(file_lsit)):
        return None
    for filename in file_lsit:
        self.print_diagnostic_message('processing match file:%s' % filename)
        # 'rb' + csv.reader is the Python 2 csv idiom; under Python 3 this
        # would need mode 'r' with newline=''.
        with open(filename, 'rb') as csvfile:
            csvreader = csv.reader(csvfile, quotechar='"', delimiter=',',
                                   skipinitialspace=True)
            for index, row in enumerate(csvreader):
                self.print_diagnostic_message('[diagnostic]:processing row:%d' % index)
                self.print_diagnostic_message('row:%s'% row)
                try:
                    #-- Ignore commented Lines and Empty Lines
                    # NOTE(review): when the row is empty, row[0] in the
                    # diagnostic below raises IndexError, which the except
                    # clause turns into sys.exit — confirm intended.
                    if (not row or row[0].startswith(comment_key)):
                        self.print_diagnostic_message('[diagnostic]:skipping row[0]:%s' % row[0])
                        continue
                    #-- ('s' | 'r') = (Raw String | Regular Expression)
                    is_regex = row[0]
                    if ('s' == row[0]):
                        is_regex = False
                    elif ('r' == row[0]):
                        is_regex = True
                    else:
                        raise Exception('file:%s, malformed line:%d. '
                                        'must be \'s\'(string) or \'r\'(regex)'
                                        %(filename,index))
                    #-- One error message per line
                    error_string = row[1]
                    if (is_regex):
                        messages_regex.append(error_string)
                    else:
                        # Raw strings get escaped into an equivalent regex.
                        messages_regex.append(self.error_to_regx(error_string))
                except Exception as e:
                    print 'ERROR: line %d is formatted incorrectly in file %s. Skipping line' % (index, filename)
                    print repr(e)
                    sys.exit(err_invalid_string_format)
    if (len(messages_regex)):
        # Single alternation pattern covering every loaded expression.
        regex = re.compile('|'.join(messages_regex))
    else:
        regex = None
    return regex, messages_regex
#---------------------------------------------------------------------
def _create_network(self, env_id, project, cluster_name):
    # Create a GCE auto-subnet network named after the cluster, record any
    # creation error on the environment and re-raise it, then poll until
    # the network is visible or the timeout elapses.
    network_name = cluster_name
    try:
        resp = self.compute_service.networks().insert(
            project=project,
            body={"autoCreateSubnetworks": True,
                  "routingConfig": {
                      "routingMode": "GLOBAL"
                  },
                  "name": network_name
                  }
        ).execute()
    except Exception as e:
        # Persist the failure on the environment record before re-raising.
        fmlogger.error(e)
        env_update = {}
        env_update['output_config'] = str({'error': str(e)})
        env_db.Environment().update(env_id, env_update)
        raise e
    network_obj = ''
    count = 0
    while not network_obj:
        try:
            network_obj = self.compute_service.networks().get(
                project=project, network=network_name
            ).execute()
        except Exception as e:
            # Transient lookup failures are only logged; the loop retries.
            fmlogger.error(e)
            #env_update = {}
            #env_update['output_config'] = str({'error': str(e)})
            #env_db.Environment().update(env_id, env_update)
        if network_obj:
            break
        else:
            time.sleep(2)
            count = count + 1
            if count >= GCLOUD_ACTION_TIMEOUT:
                raise exceptions.AppDeploymentFailure()
    return network_obj
def delete_cluster(self, env_id, env_info, resource_obj):
    # Tear down a GKE cluster: delete its firewall rule, issue the cluster
    # delete, and poll until the cluster GET fails (i.e. it is gone).
    # All errors are logged and swallowed so the resource row is always
    # deleted at the end.
    fmlogger.debug("Deleting GKE cluster")
    res_db.Resource().update(resource_obj.id, {'status': 'deleting'})
    try:
        # filtered_description is a stringified dict stored on the resource.
        filtered_description = ast.literal_eval(resource_obj.filtered_description)
        cluster_name = filtered_description['cluster_name']
        project = filtered_description['project']
        zone = filtered_description['zone']

        # Network delete is not working for some reason. So temporarily
        # commenting it out.
        #try:
        #    self._delete_network(project, cluster_name)
        #except Exception as e:
        #    fmlogger.error("Exception deleting network %s " % str(e))
        #    env_update = {}
        #    env_update['output_config'] = str({'error': str(e)})
        #    env_db.Environment().update(env_id, env_update)

        self._delete_firewall_rule(project, cluster_name)

        try:
            resp = self.gke_service.projects().zones().clusters().delete(
                projectId=project,
                zone=zone,
                clusterId=cluster_name
            ).execute()
            fmlogger.debug(resp)
        except Exception as e:
            fmlogger.error("Encountered exception when deleting cluster %s" % e)

        available = True
        while available:
            try:
                resp = self.gke_service.projects().zones().clusters().get(
                    projectId=project, zone=zone,
                    clusterId=cluster_name).execute()
            except Exception as e:
                # GET failing means the cluster no longer exists; stop polling.
                fmlogger.error("Exception encountered in retrieving cluster. Cluster does not exist. %s " % e)
                available = False
                break
            time.sleep(3)
    except Exception as e:
        fmlogger.error(e)
    res_db.Resource().delete(resource_obj.id)
    fmlogger.debug("Done deleting GKE cluster.")