The following 50 code examples, extracted from open-source Python projects, illustrate how to use pickle.Pickler().
def __init__(self, writer, reducers=None, protocol=pickle.HIGHEST_PROTOCOL):
    """Build a pickler that writes to *writer* and install custom reducers.

    Each (type, reduce_func) pair in *reducers* is handed to
    self.register() once the dispatch machinery is in place.
    """
    pickle.Pickler.__init__(self, writer, protocol=protocol)
    self.extended_init = set()
    if hasattr(pickle.Pickler, 'dispatch'):
        # Python 2: give this instance its own copy of the class-level
        # dispatch registry so registrations stay per-instance.
        self.dispatch = pickle.Pickler.dispatch.copy()
    else:
        # Python 3: seed the per-instance dispatch table from the
        # global copyreg registry.
        self.dispatch_table = copyreg.dispatch_table.copy()
    for reduced_type, reduce_func in (reducers or {}).items():
        self.register(reduced_type, reduce_func)
def save(self, obj):
    """Replace bound/builtin methods with a stable surrogate (_MyHash)
    before delegating to the base Pickler's save."""
    if isinstance(obj, (types.MethodType, type({}.pop))):
        # the Pickler cannot pickle instance methods; here we decompose
        # them into components that make them uniquely identifiable
        if hasattr(obj, '__func__'):
            func_name = obj.__func__.__name__
        else:
            func_name = obj.__name__
        inst = obj.__self__
        if type(inst) == type(pickle):
            # Method bound to a module: identify it by the module name.
            obj = _MyHash(func_name, inst.__name__)
        elif inst is None:
            # type(None) or type(module) do not pickle
            obj = _MyHash(func_name, inst)
        else:
            # Method bound to an instance: include its class so two
            # same-named methods on different classes hash differently.
            cls = obj.__self__.__class__
            obj = _MyHash(func_name, inst, cls)
    Pickler.save(self, obj)
def save_extras(self):
    # Persist self.cachedata_extras to a per-process sidecar file
    # (<cachefile>-<n>) while holding the shared cache lock.
    if not self.cachefile:
        return
    # Shared lock: multiple writers may dump extras concurrently.
    glf = bb.utils.lockfile(self.cachefile + ".lock", shared=True)
    # Probe for a free slot starting at our pid, claiming each slot
    # with a non-blocking per-slot lock.
    i = os.getpid()
    lf = None
    while not lf:
        lf = bb.utils.lockfile(self.cachefile + ".lock." + str(i), retry=False)
        if not lf or os.path.exists(self.cachefile + "-" + str(i)):
            # Slot busy (lock held or sidecar already written): release
            # any lock we got and try the next slot number.
            if lf:
                bb.utils.unlockfile(lf)
            lf = None
            i = i + 1
            continue
        # Slot claimed: write extras plus cache version with the
        # highest pickle protocol (-1).
        with open(self.cachefile + "-" + str(i), "wb") as f:
            p = pickle.Pickler(f, -1)
            p.dump([self.cachedata_extras, self.__class__.CACHE_VERSION])
    bb.utils.unlockfile(lf)
    bb.utils.unlockfile(glf)
def dumpState(self):
    """Persist the aggregator state to STATE_FILENAME under DUMP_LOCK.

    Writes to a temp file first, then copies it over the real state
    file so a crash mid-dump never leaves a truncated state file.
    """
    DUMP_LOCK.acquire()
    try:
        tmp_state_filename = STATE_FILENAME + ".tmp"
        # Fix: use a context manager so the handle is closed even if a
        # dump raises ("wb" already truncates, so the extra pre-open
        # of the temp file was redundant and removed).
        with open(tmp_state_filename, "wb") as s:
            p = pickle.Pickler(s, pickle.HIGHEST_PROTOCOL)
            p.dump(self._agg)
            p.dump(self._slices)
            p.dump(self.proxy_dockermaster)
            p.dump(self.public_url)
        copyfile(tmp_state_filename, STATE_FILENAME)
    except RuntimeError:
        print('error in DumpState')
    finally:
        # Fix: release the lock even when an unexpected exception
        # (not just RuntimeError) escapes, otherwise every later
        # dumpState() would deadlock.
        DUMP_LOCK.release()
def test_priming_pickler_memo(self): # Verify that we can set the Pickler's memo attribute. data = ["abcdefg", "abcdefg", 44] f = io.BytesIO() pickler = self.pickler_class(f) pickler.dump(data) first_pickled = f.getvalue() f = io.BytesIO() primed = self.pickler_class(f) primed.memo = pickler.memo primed.dump(data) primed_pickled = f.getvalue() self.assertNotEqual(first_pickled, primed_pickled)
def warn(server, user, data):
    """Increment *user*'s warning count on *server* in *data*, persist
    the whole structure to the module-level `fileName`, and return it.

    data layout: {server.name: {user.name: warning_count}}.
    """
    # setdefault/get collapses the original three-way nesting check
    # into one increment with identical results.
    server_warnings = data.setdefault(server.name, {})
    server_warnings[user.name] = server_warnings.get(user.name, 0) + 1
    # Fix: context manager guarantees the file is closed even if the
    # dump raises (the original leaked the handle on error).
    with open(fileName, "wb") as f:
        pickle.Pickler(f).dump(data)
    return data
def post(server, user, data, money):
    """Set *user*'s balance on *server* in *data*, persist to the
    module-level `fileName`, and return the updated structure.

    Existing users are set to *money*; unknown users (or servers) are
    initialised to 500.  NOTE(review): the 500 ignores *money* for new
    entries — preserved as-is, confirm this starting-balance behaviour
    is intended.
    """
    if server.name in data and user.name in data[server.name]:
        data[server.name][user.name] = money
    else:
        # Covers both "new user on known server" and "new server".
        data.setdefault(server.name, {})[user.name] = 500
    # Fix: context manager guarantees the file is closed even if the
    # dump raises (the original leaked the handle on error).
    with open(fileName, "wb") as f:
        pickle.Pickler(f).dump(data)
    return data
def _dorequest(self, rf, wf):
    # Serve one pickled RPC request read from rf, writing the pickled
    # reply to wf.  Returns 0 on EOF (peer closed), 1 otherwise.
    # Python 2 only: print statements, apply(), sys.exc_type/exc_value.
    rp = pickle.Unpickler(rf)
    try:
        request = rp.load()
    except EOFError:
        return 0
    if self._verbose > 1: print "Got request: %s" % repr(request)
    try:
        # A request is a (methodname, args, id) triple.
        methodname, args, id = request
        if '.' in methodname:
            # Dotted names go through the _special dispatcher.
            reply = (None, self._special(methodname, args), id)
        elif methodname[0] == '_':
            # Underscore-prefixed methods are private to the server.
            raise NameError, "illegal method name %s" % repr(methodname)
        else:
            method = getattr(self, methodname)
            reply = (None, apply(method, args), id)
    except:
        # NOTE(review): bare except deliberately converts any failure
        # into an error reply rather than killing the serving loop.
        reply = (sys.exc_type, sys.exc_value, id)
    # Negative ids mark one-way calls: suppress a contentless OK reply.
    if id < 0 and reply[:2] == (None, None):
        if self._verbose > 1: print "Suppress reply"
        return 1
    if self._verbose > 1: print "Send reply: %s" % repr(reply)
    wp = pickle.Pickler(wf)
    wp.dump(reply)
    return 1
def save_global(self, obj, name=None, pack=struct.pack):
    # We have to override this method in order to deal with objects
    # defined interactively in IPython that are not injected in
    # __main__
    kwargs = dict(name=name, pack=pack)
    if sys.version_info >= (3, 4):
        # Python >= 3.4 removed the 'pack' argument from save_global.
        del kwargs['pack']
    try:
        Pickler.save_global(self, obj, **kwargs)
    except pickle.PicklingError:
        # NOTE(review): this retry uses identical arguments *before*
        # the object is injected into its module below, so unless the
        # first call mutated pickler state it should raise the same
        # PicklingError again — confirm the intended ordering.
        Pickler.save_global(self, obj, **kwargs)
        module = getattr(obj, "__module__", None)
        if module == '__main__':
            my_name = name
            if my_name is None:
                my_name = obj.__name__
            mod = sys.modules[module]
            if not hasattr(mod, my_name):
                # IPython doesn't inject the variables defined
                # interactively in __main__
                setattr(mod, my_name, obj)
def setError(self, source, i, stocks):
    """Trigger the error protocol: flag that an error occurred and
    checkpoint the source, page index, and stocks list to the
    instance's pickle file."""
    self.hasErrorOccurred = True
    print('\n')
    print('The connection has been severed. Saving existing data to: ' + self.fileName)
    with open(self.fileName, 'wb') as checkpoint:
        writer = pickle.Pickler(checkpoint, pickle.HIGHEST_PROTOCOL)
        # Dump in a fixed order: source, page number, stocks list.
        for payload in (source, i, stocks):
            writer.dump(payload)
def test_pickler(self):
    # Check object.__sizeof__ accounting for the C _pickle.Pickler:
    # base struct plus memo table plus write buffer.
    basesize = support.calcobjsize('5P2n3i2n3iP')
    p = _pickle.Pickler(io.BytesIO())
    self.assertEqual(object.__sizeof__(p), basesize)
    MT_size = struct.calcsize('3nP0n')
    ME_size = struct.calcsize('Pn0P')
    check = self.check_sizeof
    check(p, basesize +
          MT_size + 8 * ME_size +       # Minimal memo table size.
          sys.getsizeof(b'x' * 4096))   # Minimal write buffer size.
    for i in range(6):
        p.dump(chr(i))
    # After pickling 6 objects the memo table must have grown to 32
    # entries; the write buffer is flushed per dump, so it adds 0.
    check(p, basesize +
          MT_size + 32 * ME_size +  # Size of memo table required to
                                    # save references to 6 objects.
          0)                        # Write buffer is cleared after every dump().
def python_memcache_serializer(key, value):
    """Serialize *value* for memcache (Python 2), returning a
    (payload, flags) pair; flags record how the payload was encoded."""
    flags = 0
    if isinstance(value, str):
        # Byte strings pass through untouched, no flag set.
        payload = value
    elif isinstance(value, int):
        flags |= FLAG_INTEGER
        payload = "%d" % value
    elif isinstance(value, long):
        flags |= FLAG_LONG
        payload = "%d" % value
    else:
        # Everything else is pickled with protocol 0 (ASCII-safe).
        flags |= FLAG_PICKLE
        buf = StringIO()
        pickle.Pickler(buf, 0).dump(value)
        payload = buf.getvalue()
    return payload, flags
def __init__(self, filename, compress=0, cache_size=10, protocol=None):
    # Pickler writing to *filename*; with compress enabled the pickle
    # is staged in an in-memory buffer instead of written directly.
    self._filename = filename
    self._filenames = [filename, ]
    self.cache_size = cache_size
    self.compress = compress
    if not self.compress:
        self.file = open(filename, 'wb')
    else:
        self.file = BytesIO()
    # Count the number of npy files that we have created:
    self._npy_counter = 0
    # By default we want a pickle protocol that only changes with
    # the major python version and not the minor one
    if protocol is None:
        protocol = (pickle.DEFAULT_PROTOCOL if PY3
                    else pickle.HIGHEST_PROTOCOL)
    Pickler.__init__(self, self.file, protocol=protocol)
    # delayed import of numpy, to avoid tight coupling
    try:
        import numpy as np
    except ImportError:
        np = None
    self.np = np
def save(self, obj):
    """Strip attributes that must not end up in the pickle, then
    delegate to the base Pickler's save."""
    # Remove the tag.trace attribute from Variable and Apply nodes
    if isinstance(obj, theano.gof.utils.scratchpad):
        for tag in self.tag_to_remove:
            if hasattr(obj, tag):
                del obj.__dict__[tag]
    # Remove manually-added docstring of Elemwise ops
    elif (isinstance(obj, theano.tensor.Elemwise)):
        if '__doc__' in obj.__dict__:
            del obj.__dict__['__doc__']
    return Pickler.save(self, obj)

# Make an unpickler that tries encoding byte streams before raising TypeError.
# This is useful with python 3, in order to unpickle files created with
# python 2.
# This code is taken from Pandas, https://github.com/pydata/pandas,
# under the same 3-clause BSD license.
def __init__(self, filename, compress=0, cache_size=10, protocol=None):
    """Open a pickler that streams to *filename*, or to an in-memory
    buffer when compression is requested."""
    self._filename = filename
    self._filenames = [filename, ]
    self.cache_size = cache_size
    self.compress = compress
    # Compressed output is staged in memory; otherwise write straight
    # to the target file.
    self.file = BytesIO() if self.compress else open(filename, 'wb')
    # Counter for the npy files created alongside the pickle.
    self._npy_counter = 1
    if protocol is None:
        # Default to a protocol that only changes with the major
        # Python version, never the minor one.
        if PY3_OR_LATER:
            protocol = pickle.DEFAULT_PROTOCOL
        else:
            protocol = pickle.HIGHEST_PROTOCOL
    Pickler.__init__(self, self.file, protocol=protocol)
    # Import numpy lazily so this module has no hard numpy dependency.
    try:
        import numpy as np
    except ImportError:
        np = None
    self.np = np
def loadData (self, filename, verbose=True, replace_missing=True):
    ''' Get the data from a text file in one of 3 formats: matrix, sparse, binary_sparse'''
    if verbose:
        print("========= Reading " + filename)
    start = time.time()
    pickle_path = os.path.join(self.tmp_dir, os.path.basename(filename) + ".pickle")
    if self.use_pickle and os.path.exists(pickle_path):
        # Fix: the pickle is written below in binary mode ("wb"), so it
        # must be read back in binary mode too ("rb", not "r"), which
        # breaks under Python 3 text decoding.
        with open(pickle_path, "rb") as pickle_file:
            vprint(verbose, "Loading pickle file : " + pickle_path)
            return pickle.load(pickle_file)
    if 'format' not in self.info.keys():
        self.getFormatData(filename)
    if 'feat_num' not in self.info.keys():
        self.getNbrFeatures(filename)
    data_func = {'dense': data_io.data,
                 'sparse': data_io.data_sparse,
                 'sparse_binary': data_io.data_binary_sparse}
    data = data_func[self.info['format']](filename, self.info['feat_num'])
    # IMPORTANT: when we replace missing values we double the number of variables
    # NOTE(review): under Python 3, map() is lazy, so np.any(map(...))
    # is always truthy — confirm missing-value detection behaves as
    # intended (np.any(np.isnan(data)) may be what was meant).
    if self.info['format'] == 'dense' and replace_missing and np.any(map(np.isnan, data)):
        vprint(verbose, "Replace missing values by 0 (slow, sorry)")
        data = data_converter.replace_missing(data)
    if self.use_pickle:
        with open(pickle_path, "wb") as pickle_file:
            vprint(verbose, "Saving pickle file : " + pickle_path)
            p = pickle.Pickler(pickle_file)
            p.fast = True
            p.dump(data)
    end = time.time()
    if verbose:
        print("[+] Success in %5.2f sec" % (end - start))
    return data
def loadLabel (self, filename, verbose=True):
    ''' Get the solution/truth values'''
    if verbose:
        print("========= Reading " + filename)
    start = time.time()
    pickle_path = os.path.join(self.tmp_dir, os.path.basename(filename) + ".pickle")
    if self.use_pickle and os.path.exists(pickle_path):
        # Fix: the pickle is written below in binary mode ("wb"), so it
        # must be opened with "rb" here — text mode ("r") breaks the
        # load under Python 3.
        with open(pickle_path, "rb") as pickle_file:
            vprint(verbose, "Loading pickle file : " + pickle_path)
            return pickle.load(pickle_file)
    if 'task' not in self.info.keys():
        self.getTypeProblem(filename)
    # IG: Here change to accommodate the new multiclass label format
    if self.info['task'] == 'multilabel.classification':
        label = data_io.data(filename)
    elif self.info['task'] == 'multiclass.classification':
        label = data_converter.convert_to_num(data_io.data(filename))
    else:
        label = np.ravel(data_io.data(filename))  # get a column vector
    if self.use_pickle:
        with open(pickle_path, "wb") as pickle_file:
            vprint(verbose, "Saving pickle file : " + pickle_path)
            p = pickle.Pickler(pickle_file)
            p.fast = True
            p.dump(label)
    end = time.time()
    if verbose:
        print("[+] Success in %5.2f sec" % (end - start))
    return label
def write_catastrophe_record(self, *args, **kwargs):
    """Pickle a CatastropheRecord to the coordinator-chosen file, when
    the coordinator exists and says it is time to log."""
    coordinator = self.catastrophe_coordinator
    if coordinator is None:
        return
    if not coordinator.should_log():
        return
    record = CatastropheRecord(*args, **kwargs)
    with open(coordinator.get_filename(), "wb") as f:
        pickle.Pickler(f).dump(record)
    coordinator.step()
def write_block_record(self, *args, **kwargs):
    """Pickle a BlockRecord to the coordinator-chosen file, when the
    coordinator exists and says it is time to log."""
    coordinator = self.block_coordinator
    if coordinator is None:
        return
    if not coordinator.should_log():
        return
    record = BlockRecord(*args, **kwargs)
    with open(coordinator.get_filename(), "wb") as f:
        pickle.Pickler(f).dump(record)
    coordinator.step()
def save_episode(episode_path, episode):
    """Serialize *episode* to *episode_path* as a gzip-compressed pickle."""
    with gzip.open(episode_path, "wb") as compressed:
        pickle.Pickler(compressed).dump(episode)
def fix_episode(episode_path):
    """Upgrade an episode file in place to version 2 by shifting each
    frame's action back one step; unreadable files are deleted."""
    try:
        episode = load_episode(episode_path)
    except EOFError:
        print("Error reading: {}".format(episode_path))
        os.remove(episode_path)
        return
    if episode.version == 2:
        print("Version 2 already: {}".format(episode_path))
        return
    previous = episode.frames
    episode.frames = []
    # Pair each frame with its successor; the last frame is dropped
    # since it has no successor action.
    for frame, successor in zip(previous, previous[1:]):
        frame.action = successor.action
        episode.frames.append(frame)
    episode.version = 2
    payload = pickle.dumps(episode)
    with gzip.open(episode_path, "wb") as f:
        f.write(payload)
def save_labels(filename, episode, frames):
    """Attach the time-ordered labeled *frames* to *episode* and write
    the episode to *filename* as a gzip-compressed pickle.

    Args:
        filename (str): file to save the labeled episode to.
        episode: episode object whose ``frames`` attribute is replaced.
        frames (list): a time ordered list of labeled frames.
    """
    episode.frames = frames
    with gzip.open(filename, "wb") as sink:
        pickle.Pickler(sink).dump(episode)
def dumps_with_persistent_ids(obj, protocol=None):
    """
    Performs a pickle dumps on the given object, substituting all
    references to a TradingEnvironment or AssetFinder with tokenized
    representations.

    All arguments are passed to pickle.Pickler and are described therein.
    """
    sink = BytesIO()
    writer = pickle.Pickler(sink, protocol)
    writer.persistent_id = _persistent_id
    writer.dump(obj)
    return sink.getvalue()
def __setitem__(self, key, value):
    """Store *value* under *key*, pickling it into the backing dict;
    also mirror it in the cache when write-back mode is enabled."""
    if self.writeback:
        self.cache[key] = value
    buf = StringIO()
    Pickler(buf, self._protocol).dump(value)
    self.dict[key] = buf.getvalue()
def register(self, type, reduce_func):
    """Attach a reducer function to a given type in the dispatch table."""
    if not hasattr(pickle.Pickler, 'dispatch'):
        # Python 3: the per-instance dispatch_table maps a type to a
        # reduce callable directly.
        self.dispatch_table[type] = lambda obj: reduce_func(self, obj)
        return
    # Python 2: pickler dispatching is not explicitly customizable, so
    # bridge via a closure that mimics a save_<type> method.
    def dispatcher(pickler, obj):
        reduced = reduce_func(pickler, obj)
        pickler.save_reduce(obj=obj, *reduced)
    self.dispatch[type] = dispatcher
def createPickleChild(self, name, child):
    """Register *child* as a web child resource (when our path is not a
    directory) and persist it to disk: strings are written verbatim,
    anything else is pickled (defaulting the extension to .trp)."""
    if not os.path.isdir(self.path):
        resource.Resource.putChild(self, name, child)
    # xxx use a file-extension-to-save-function dictionary instead
    if isinstance(child, str):  # fix: isinstance instead of type comparison
        # Fix: context manager guarantees the handle is closed even on
        # a write error (the original's close was unreliable here).
        with open(os.path.join(self.path, name), 'wb') as fl:
            fl.write(child)
    else:
        if '.' not in name:
            name = name + '.trp'
        with open(os.path.join(self.path, name), 'wb') as fl:
            from pickle import Pickler
            Pickler(fl).dump(child)