我们从Python开源项目中,提取了以下50个代码示例,用于说明如何使用thread.allocate_lock()。
def _run_in_multiple_threads(test1): test1() import sys try: import thread except ImportError: import _thread as thread errors = [] def wrapper(lock): try: test1() except: errors.append(sys.exc_info()) lock.release() locks = [] for i in range(10): _lock = thread.allocate_lock() _lock.acquire() thread.start_new_thread(wrapper, (_lock,)) locks.append(_lock) for _lock in locks: _lock.acquire() if errors: raise errors[0][1]
def __init__(self, port="/dev/ttyUSB0", baudrate=57600, timeout=0.5): self.PID_RATE = 30 # Do not change this! It is a fixed property of the Arduino PID controller. self.PID_INTERVAL = 1000 / 30 self.port = port self.baudrate = baudrate self.timeout = timeout self.encoder_count = 0 self.writeTimeout = timeout self.interCharTimeout = timeout / 30. # Keep things thread safe self.mutex = thread.allocate_lock() # An array to cache analog sensor readings self.analog_sensor_cache = [None] * self.N_ANALOG_PORTS # An array to cache digital sensor readings self.digital_sensor_cache = [None] * self.N_DIGITAL_PORTS
def __init__(self, name, cache_size=512, marshal=marshal):
    """
    Create a persistent FIFO queue stored in directory *name*.

    *cache_size* bounds the number of queue items cached at the head
    and tail of the queue and must be positive.  Serialization defaults
    to the marshal module; any module/instance with the same interface
    (e.g. pickle) may be passed as *marshal*.
    """
    assert cache_size > 0, 'Cache size must be larger than 0'
    self.name = name
    self.cache_size = cache_size
    self.marshal = marshal
    # On-disk layout: an index file plus a scratch file inside the
    # queue directory.
    self.index_file = os.path.join(name, INDEX_FILENAME)
    self.temp_file = os.path.join(name, 'tempfile')
    # Guards queue state against concurrent mutation.
    self.mutex = thread.allocate_lock()
    self._init_index()
def __init__(self, maxEntries=10000, maxAge=14400): """Create a new SessionCache. @type maxEntries: int @param maxEntries: The maximum size of the cache. When this limit is reached, the oldest sessions will be deleted as necessary to make room for new ones. The default is 10000. @type maxAge: int @param maxAge: The number of seconds before a session expires from the cache. The default is 14400 (i.e. 4 hours).""" self.lock = thread.allocate_lock() # Maps sessionIDs to sessions self.entriesDict = {} #Circular list of (sessionID, timestamp) pairs self.entriesList = [(None,None)] * maxEntries self.firstIndex = 0 self.lastIndex = 0 self.maxAge = maxAge
def __init__(self, application, environ=None, multithreaded=True, **kw):
    """
    environ, if present, must be a dictionary-like object. Its
    contents will be copied into application's environ. Useful
    for passing application-specific variables.

    Set multithreaded to False if your application is not MT-safe.
    """
    # Doesn't make sense to let 'handler' through to the base class.
    # Fixed: dict.has_key() is Python-2-only; 'in' works on 2 and 3.
    if 'handler' in kw:
        del kw['handler']
    super(WSGIServer, self).__init__(**kw)
    if environ is None:
        environ = {}
    self.application = application
    self.environ = environ
    self.multithreaded = multithreaded
    # Used to force single-threadedness when multithreaded is False.
    self._app_lock = thread.allocate_lock()
def test_thread_separation():
    # Checks that PluginManager state is isolated per thread: an
    # attribute set by the worker thread must not leak into the
    # manager instance used by the main thread.
    def f():
        c = PluginManager()
        lock1.acquire()     # wait until the main thread has set a.x
        lock2.acquire()
        c.x = 7             # would clobber a.x if state were shared
        lock1.release()
        lock2.release()
    lock1 = thread.allocate_lock()
    lock2 = thread.allocate_lock()
    lock1.acquire()         # held so the worker blocks until we are ready
    thread.start_new_thread(f, ())
    a = PluginManager()
    a.x = 5
    lock1.release()         # let the worker proceed
    lock2.acquire()         # NOTE(review): ordering with the worker's
                            # lock2.acquire() is timing-dependent
    return a.x              # expected to still be 5 if state is isolated
def __init__(self, lock=None): # the lock actually used by .acquire() and .release() if lock is None: self.mutex = thread.allocate_lock() else: if hasattr(lock, 'acquire') and \ hasattr(lock, 'release'): self.mutex = lock else: raise TypeError, 'condition constructor requires ' \ 'a lock argument' # lock used to block threads until a signal self.checkout = thread.allocate_lock() self.checkout.acquire() # internal critical-section lock, & the data it protects self.idlock = thread.allocate_lock() self.id = 0 self.waiting = 0 # num waiters subject to current release self.pending = 0 # num waiters awaiting next signal self.torelease = 0 # num waiters to release self.releasing = 0 # 1 iff release is in progress
def __init__(self): self.debugApplication = None self.debuggingThread = None self.debuggingThreadStateHandle = None self.stackSnifferCookie = self.stackSniffer = None self.codeContainerProvider = None self.debuggingThread = None self.breakFlags = None self.breakReason = None self.appDebugger = None self.appEventConnection = None self.logicalbotframe = None # Anything at this level or below does not exist! self.currentframe = None # The frame we are currently in. self.recursiveData = [] # Data saved for each reentery on this thread. bdb.Bdb.__init__(self) self._threadprotectlock = thread.allocate_lock() self.reset()
def main(url):
    # Clear previous results from the portsscan table, run the port
    # scan for *url*, then close the database.  (Python 2 syntax;
    # depends on pymysql, ip_scan and module-level globals.)
    global db, cursor
    try:
        # NOTE(review): credentials are hard-coded -- confirm acceptable.
        db = pymysql.connect('localhost', 'root', 'root', 'test')
        cursor = db.cursor()
        sqlDelete = 'delete from portsscan'
        cursor.execute(sqlDelete)
        db.commit()
    except Exception, e:
        print '1', e
    # try:
    #     os.remove('./../../result/allPorts.txt')
    # except Exception,e:
    #     print e
    # url = raw_input('Input the ip you want to scan:\n')
    lock = thread.allocate_lock()  # NOTE(review): lock is never used here
    ip_scan(url)
    db.close()
    return
def main():
    # Start one thread per entry in the global 'loops' sequence and
    # busy-wait until every thread has signalled completion by
    # releasing its lock.  (Python 2 syntax; loop/loops/ctime are
    # module-level names.)
    print 'starting threads...'
    locks = []
    nloops = range(len(loops))
    for i in nloops:
        lock = thread.allocate_lock()
        lock.acquire()   # held until loop() releases it
        locks.append(lock)
    for i in nloops:
        thread.start_new_thread(loop, (i, loops[i], locks[i]))
    for i in nloops:
        # Spin until thread i releases its lock (busy wait).
        while locks[i].locked():
            pass
    print 'all DONE at:', ctime()
def __init__(self, dsn, dsnDict, console): """ Object constructor. :param dsn: The database connection string. :param dsnDict: The database connection string parsed into a dict. :param console: The console instance. """ self.dsn = dsn self.dsnDict = dsnDict self.console = console self.db = None self._lock = thread.allocate_lock() #################################################################################################################### # # # CONNECTION INITIALIZATION/TERMINATION/RETRIEVAL # # # ####################################################################################################################
def __init__(self, n=1):
    # Worker-pool bootstrap: spawn n ZServerPublisher threads feeding
    # from self.accept, while holding the sync lock so callers block
    # until the pool is fully built.
    sync = thread.allocate_lock()
    self._acquire = sync.acquire
    self._release = sync.release
    pool = []
    self._lists = (
        pool,  # Collection of locks representing threads are not
               # waiting for work to do
        [],    # Request queue
        [],    # Pool of locks representing threads that are
               # waiting (ready) for work to do.
        )
    self._acquire()  # callers will block
    try:
        while n > 0:
            l = thread.allocate_lock()
            l.acquire()   # each worker's lock starts held
            pool.append(l)
            thread.start_new_thread(ZServerPublisher, (self.accept,))
            n = n - 1
    finally:
        self._release()  # let callers through now
def __init__(self, raw, buffer_size=DEFAULT_BUFFER_SIZE):
    """Create a new buffered reader wrapping the readable raw IO
    object *raw*.

    Raises IOError if *raw* is not readable, ValueError for a
    non-positive buffer size.
    """
    if not raw.readable():
        raise IOError('"raw" argument must be readable.')
    _BufferedIOMixin.__init__(self, raw)
    if buffer_size <= 0:
        raise ValueError("invalid buffer size")
    self.buffer_size = buffer_size
    # Start with an empty read buffer, guarded by its own lock.
    self._reset_read_buf()
    self._read_lock = Lock()
def __init__(self, raw, buffer_size=DEFAULT_BUFFER_SIZE,
             max_buffer_size=None):
    """Create a buffered writer wrapping the writable raw IO object
    *raw*.

    max_buffer_size is accepted for backward compatibility only and
    triggers a DeprecationWarning.
    """
    if not raw.writable():
        raise IOError('"raw" argument must be writable.')
    _BufferedIOMixin.__init__(self, raw)
    if buffer_size <= 0:
        raise ValueError("invalid buffer size")
    if max_buffer_size is not None:
        warnings.warn("max_buffer_size is deprecated", DeprecationWarning,
                      self._warning_stack_offset)
    self.buffer_size = buffer_size
    # Pending bytes not yet flushed to the raw stream.
    self._write_buf = bytearray()
    self._write_lock = Lock()
def allocate_lock():
    """Dummy implementation of thread.allocate_lock().

    Returns a fresh LockType instance.
    """
    lock = LockType()
    return lock
def __init__(self, bind, connect): 'Initialize the Proxy object.' self.__bind = bind self.__connect = connect self.__status = False self.__thread = False self.__lock = _thread.allocate_lock()
def __init__(self, function, *args, **kwargs): 'Initialize the Mille_Timer object.' self.__function = function self.__args = args self.__kwargs = kwargs self.__status = False self.__thread = False self.__lock = _thread.allocate_lock()
def __init__(self, socket): 'Initialize the Zero SPOTS Protocol object.' self.__sock = socket self.__send = _thread.allocate_lock() self.__recv = _thread.allocate_lock() self.__temp = ''
def __init__(self, ZSP):
    'Initialize the Query/Reply Protocol object.'
    self.__ZSP = ZSP
    self.__error = None
    # Outstanding queries/replies bookkeeping.
    self.__Q_anchor = []
    self.__Q_packet = []
    self.__R_anchor = {}
    self.__Q_lock = _thread.allocate_lock()
    self.__R_lock = _thread.allocate_lock()
    # Background servicing loop (self.__thread is defined on the class).
    _thread.start_new_thread(self.__thread, ())
def recv_R(self, ID, timeout=None):
    'Receive one reply.'
    # Fail fast if the servicing thread already recorded an error.
    if self.__error:
        raise self.__error
    if timeout is not None:
        if not isinstance(timeout, (float, int, long)):
            raise TypeError, 'timeout must be of type float, int, or long'
        if not timeout >= 0:
            raise ValueError, 'timeout must be greater than or equal to 0'
    # The anchor starts as a held lock; the servicing thread appends the
    # reply as anchor[1] and releases anchor[0] to wake us.
    anchor = [_thread.allocate_lock()]
    anchor[0].acquire()
    self.__R_lock.acquire()
    try:
        try:
            self.__R_anchor[ID] = anchor
        finally:
            self.__R_lock.release()
    except AttributeError:
        # __R_anchor was torn down -- report the recorded error instead.
        raise self.__error
    if timeout:
        # Watchdog thread that cancels the wait after *timeout* seconds.
        _thread.start_new_thread(self.__R_thread, (timeout, ID))
    anchor[0].acquire()  # block until the reply (or the timeout) arrives
    try:
        R = anchor[1]
    except IndexError:
        # No reply was attached: either an error occurred or we timed out.
        if self.__error:
            raise self.__error
        raise Warning
    return R
def __init__(self, QRP): 'Initialize the Query/Reply Interface object.' self.__QRP = QRP self.__ID = 0 self.__lock = _thread.allocate_lock()
def __init__(self, maxsize=0, priorities = standard_priorities,
             realtime = 1, idle = 1):
    """Initialize a queue object with a given maximum size.

    If maxsize is <= 0, the queue size is infinite.
    priorities: a dictionary with definition of priorities
    realtime/idle: passed through to self._init().
    """
    assert self._check_priorities(priorities)  # check only if not -OO
    # Fixed: bare 'import thread' is Python-2-only; fall back to the
    # Python 3 module name so the lazy import works on both.
    try:
        import thread
    except ImportError:
        import _thread as thread
    self._init(priorities, maxsize, realtime, idle)
    self.mutex = thread.allocate_lock()
    # esema/fsema presumably signal the empty/full conditions --
    # confirm against the rest of the class.
    self.esema = thread.allocate_lock()
    self.esema.acquire()  # queue starts empty
    self.fsema = thread.allocate_lock()
def __init__(self, *pipes): self.active_pipes = set() self.active_sources = set() self.active_drains = set() self.active_sinks = set() self._add_pipes(*pipes) self.thread_lock = thread.allocate_lock() self.command_lock = thread.allocate_lock() self.__fdr,self.__fdw = os.pipe() self.threadid = None
def __init__(self, filename, type): self.type = type self.filename = filename if self.filename: self.db = None else: self.db = {} self.lock = thread.allocate_lock()
def __init__(self, mod_name = '__main__', launch_file = None):
    # IPython-backed REPL backend: start the kernel and its channels,
    # then wire each channel back to this backend object.  Channel
    # names differ across IPython versions, hence the version checks.
    ReplBackend.__init__(self)
    self.launch_file = launch_file
    self.mod_name = mod_name
    self.km = VsKernelManager()
    if is_ipython_versionorgreater(0, 13):
        # http://pytools.codeplex.com/workitem/759
        # IPython stopped accepting the ipython flag and switched to launcher, the new
        # default is what we want though.
        self.km.start_kernel(**{'extra_arguments': self.get_extra_arguments()})
    else:
        self.km.start_kernel(**{'ipython': True, 'extra_arguments': self.get_extra_arguments()})
    self.km.start_channels()
    self.exit_lock = thread.allocate_lock()
    self.exit_lock.acquire()  # used as an event
    self.members_lock = thread.allocate_lock()
    self.members_lock.acquire()  # used as an event
    self.km.shell_channel._vs_backend = self
    self.km.stdin_channel._vs_backend = self
    if is_ipython_versionorgreater(1, 0):
        self.km.iopub_channel._vs_backend = self
    else:
        self.km.sub_channel._vs_backend = self
    self.km.hb_channel._vs_backend = self
    self.execution_count = 1
def __init__(self, socket, callback): self.socket = socket self.seq = 0 self.callback = callback self.lock = thread.allocate_lock() # start the testing reader thread loop self.test_thread_id = thread.start_new_thread(self.readSocket, ())
def __init__(self):
    """Set up default exception-break behaviour.

    Exceptions commonly used for control flow are registered as
    never-break under both their Python 2 ('exceptions.*') and
    Python 3 ('builtins.*') qualified names.
    """
    self.default_mode = BREAK_MODE_UNHANDLED
    self.break_on = { }
    self.handler_cache = dict(self.BUILT_IN_HANDLERS)
    self.handler_lock = thread.allocate_lock()
    # Same registration for each name, preserving the original order
    # (the ten individual add_exception calls were folded into a loop).
    for name in ('IndexError', 'KeyError', 'AttributeError',
                 'StopIteration', 'GeneratorExit'):
        self.add_exception('exceptions.' + name, BREAK_MODE_NEVER)
        self.add_exception('builtins.' + name, BREAK_MODE_NEVER)
def __init__(self, id = None):
    # Per-thread debugger state.  *id* defaults to the current
    # thread's identifier.
    if id is not None:
        self.id = id
    else:
        self.id = thread.get_ident()
    # Dispatch table: trace-event name -> handler method.
    self._events = {'call' : self.handle_call,
                    'line' : self.handle_line,
                    'return' : self.handle_return,
                    'exception' : self.handle_exception,
                    'c_call' : self.handle_c_call,
                    'c_return' : self.handle_c_return,
                    'c_exception' : self.handle_c_exception,
                    }
    self.cur_frame = None
    self.stepping = STEPPING_NONE
    self.unblock_work = None
    # _block_lock starts held: the thread stays blocked until released.
    self._block_lock = thread.allocate_lock()
    self._block_lock.acquire()
    self._block_starting_lock = thread.allocate_lock()
    self._is_blocked = False
    self._is_working = False
    self.stopped_on_line = None
    self.detach = False
    self.trace_func = self.trace_func # replace self.trace_func w/ a bound method so we don't need to re-create these regularly
    self.prev_trace_func = None
    self.trace_func_stack = []
    self.reported_process_loaded = False
    self.django_stepping = None
    self.is_sending = False
    # stackless changes
    if stackless is not None:
        self._stackless_attach()
    if sys.platform == 'cli':
        # IronPython keeps an explicit frame list.
        self.frames = []
def __init__(self): self.lock = thread.allocate_lock()
def __init__(self, logger=None):
    """Create the trigger: a self-pipe whose read end is handed to
    asyncore; self.trigger holds the write end (presumably written to
    wake the event loop -- confirm against the class)."""
    read_end, write_end = os.pipe()
    self.trigger = write_end
    self.logger = logger
    asyncore.file_dispatcher.__init__(self, read_end)
    self.lock = _thread.allocate_lock()
    # Callables queued for the event-loop thread.
    self.thunks = []
def __init__ (self, logger = None):
    # NOTE(review): socket-pair variant of the self-pipe trigger --
    # typically used where an os.pipe() cannot be select()ed (Windows);
    # confirm against the sibling pipe-based implementation.
    self.logger = logger
    sock_class = socket.socket
    a = sock_class (socket.AF_INET, socket.SOCK_STREAM)
    w = sock_class (socket.AF_INET, socket.SOCK_STREAM)
    try:
        # Allow quick rebinding of self.address across restarts;
        # best-effort only.
        a.setsockopt(
            socket.SOL_SOCKET, socket.SO_REUSEADDR,
            a.getsockopt(socket.SOL_SOCKET, socket.SO_REUSEADDR) | 1
            )
    except socket.error:
        pass
    # tricky: get a pair of connected sockets
    a.bind (self.address)
    a.listen (1)
    w.setblocking (0)   # non-blocking so connect() returns immediately
    try:
        w.connect (self.address)
    except:
        pass
    r, addr = a.accept()
    a.close()           # listener no longer needed once connected
    w.setblocking (1)
    self.trigger = w    # write end: poked to wake the loop
    asyncore.dispatcher.__init__ (self, r)
    self.lock = _thread.allocate_lock()
    self.thunks = []
    self._trigger_connected = 0
def __init__(self): """ Most important init method """ self.events = [] self.mode = "m" self.proxy = xmlrpclib.ServerProxy("http://localhost:20757") self.lock = thread.allocate_lock() self.event_to_send = -1