Python six.moves.cPickle module: HIGHEST_PROTOCOL code examples

The following 50 code examples, extracted from open-source Python projects, illustrate how six.moves.cPickle.HIGHEST_PROTOCOL is used.
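Before the project excerpts, here is a minimal, self-contained sketch (not taken from any of the projects below) of the pattern they all share: import the fastest available pickle implementation through six.moves.cPickle, open the target file in binary mode, and pass HIGHEST_PROTOCOL so the dump uses the most compact, fastest protocol the running interpreter supports.

from six.moves import cPickle as pickle

data = {'weights': [0.1, 0.2, 0.3], 'epoch': 42}

# Binary mode ('wb'/'rb') is required: HIGHEST_PROTOCOL is a binary format.
with open('example.pickle', 'wb') as f:
    pickle.dump(data, f, pickle.HIGHEST_PROTOCOL)

with open('example.pickle', 'rb') as f:
    restored = pickle.load(f)
assert restored == data

Note that HIGHEST_PROTOCOL differs per interpreter (2 on Python 2.7, 3 or higher on Python 3), so pickles written this way are not guaranteed to load on an older Python.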

Project: logodetect | Author: munibasad
def maybe_pickle(data_dirs, force=False):
    dataset_names = []
    for dir in data_dirs:
        set_filename = dir + '.pickle'
        dataset_names.append(set_filename)
        if os.path.exists(set_filename) and not force:
            # You may overwrite by setting force=True
            print('%s already present - Skipping pickling. ' % set_filename)
        else:
            print('Pickling %s.' % set_filename)
            dataset = load_logo(dir)
            try:
                with open(set_filename, 'wb') as f:
                    pickle.dump(dataset, f, pickle.HIGHEST_PROTOCOL)
            except Exception as e:
                print('Unable to save data to', set_filename, ':', e)
    return dataset_names
Project: logodetect | Author: munibasad
def save_pickle(train_dataset, train_labels, valid_dataset, valid_labels,
                test_dataset, test_labels):
    try:
        f = open(PICKLE_FILENAME, 'wb')
        save = {
            'train_dataset': train_dataset,
            'train_labels': train_labels,
            'valid_dataset': valid_dataset,
            'valid_labels': valid_labels,
            'test_dataset': test_dataset,
            'test_labels': test_labels,
        }
        pickle.dump(save, f, pickle.HIGHEST_PROTOCOL)
        f.close()
    except Exception as e:
        print('Unable to save data to', PICKLE_FILENAME, ':', e)
        raise
Project: mgtools | Author: miyagaw61
def save_snapshot(self, filename=None):
        """
        Save a snapshot of current process to file
        Warning: this is not thread safe, do not use with multithread program

        Args:
            - filename: target file to save snapshot

        Returns:
            - Bool
        """
        if not filename:
            filename = self.get_config_filename("snapshot")

        snapshot = self.take_snapshot()
        if not snapshot:
            return False
        # dump to file
        fd = open(filename, "wb")
        pickle.dump(snapshot, fd, pickle.HIGHEST_PROTOCOL)
        fd.close()

        return True
Project: ML-Project | Author: Shiam-Chowdhury
def maybe_pickle(data_folders, min_num_images_per_class, force=False):
  dataset_names = []
  folders_list = os.listdir(data_folders)
  for folder in folders_list:

    #print(os.path.join(data_folders, folder))
    curr_folder_path = os.path.join(data_folders, folder)
    if os.path.isdir(curr_folder_path):
        set_filename = curr_folder_path + '.pickle'
        dataset_names.append(set_filename)
        if os.path.exists(set_filename) and not force:
          # You may override by setting force=True.
          print('%s already present - Skipping pickling.' % set_filename)
        else:
          print('Pickling %s.' % set_filename)
          dataset = load_letter(curr_folder_path, min_num_images_per_class) # load and normalize the data
          try:
            with open(set_filename, 'wb') as f:
                pickle.dump(dataset, f, pickle.HIGHEST_PROTOCOL)
          except Exception as e:
            print('Unable to save data to', set_filename, ':', e)

  return dataset_names
Project: FCN-GoogLeNet | Author: DeepSegment
def read_dataset(data_dir):
    pickle_filename = "PascalVoc.pickle"
    pickle_filepath = os.path.join(data_dir, pickle_filename)
    if not os.path.exists(pickle_filepath):
        utils.maybe_download_and_extract(data_dir, DATA_URL, is_tarfile=True)
        PascalVoc_folder = "VOCdevkit"
        result = create_image_lists(os.path.join(data_dir, PascalVoc_folder))
        print ("Pickling ...")
        with open(pickle_filepath, 'wb') as f:
            pickle.dump(result, f, pickle.HIGHEST_PROTOCOL)
    else:
        print ("Found pickle file!")

    with open(pickle_filepath, 'rb') as f:
        result = pickle.load(f)
        training_records = result['training']
        validation_records = result['validation']
        del result

    return training_records, validation_records
Project: FCN-GoogLeNet | Author: DeepSegment
def read_dataset(data_dir):
    pickle_filename = "MITSceneParsing.pickle"
    pickle_filepath = os.path.join(data_dir, pickle_filename)
    if not os.path.exists(pickle_filepath):
        utils.maybe_download_and_extract(data_dir, DATA_URL, is_zipfile=True)
        SceneParsing_folder = os.path.splitext(DATA_URL.split("/")[-1])[0]
        result = create_image_lists(os.path.join(data_dir, SceneParsing_folder))
        print ("Pickling ...")
        with open(pickle_filepath, 'wb') as f:
            pickle.dump(result, f, pickle.HIGHEST_PROTOCOL)
    else:
        print ("Found pickle file!")

    with open(pickle_filepath, 'rb') as f:
        result = pickle.load(f)
        training_records = result['training']
        validation_records = result['validation']
        del result

    return training_records, validation_records
Project: EBGAN.tensorflow | Author: shekkizh
def read_dataset(data_dir):
    pickle_filename = "celebA.pickle"
    pickle_filepath = os.path.join(data_dir, pickle_filename)
    if not os.path.exists(pickle_filepath):
        utils.maybe_download_and_extract(data_dir, DATA_URL, is_zipfile=True)
        celebA_folder = os.path.splitext(DATA_URL.split("/")[-1])[0]
        result = create_image_lists(os.path.join(data_dir, celebA_folder))
        print ("Training set: %d" % len(result['train']))
        print ("Test set: %d" % len(result['test']))
        print ("Validation set: %d" % len(result['validation']))
        print ("Pickling ...")
        with open(pickle_filepath, 'wb') as f:
            pickle.dump(result, f, pickle.HIGHEST_PROTOCOL)
    else:
        print ("Found pickle file!")

    with open(pickle_filepath, 'rb') as f:
        result = pickle.load(f)
        training_images = result['train']
        testing_images = result['test']
        validation_images = result['validation']

        del result
    return training_images, testing_images, validation_images
Project: tensorflow_image_tutorial | Author: ybenoit
def maybe_pickle(self, data_folders, min_num_images_per_class, force=False):
        dataset_names = []
        for folder in data_folders:
            set_filename = folder + '.pickle'
            dataset_names.append(set_filename)
            if os.path.exists(set_filename) and not force:
                # You may override by setting force=True.
                print('%s already present - Skipping pickling.' % set_filename)
            else:
                print('Pickling %s.' % set_filename)
                dataset = self.load_letter(folder, min_num_images_per_class, self.image_size, self.pixel_depth)
                try:
                    with open(set_filename, 'wb') as f:
                        pickle.dump(dataset, f, pickle.HIGHEST_PROTOCOL)
                except Exception as e:
                    print('Unable to save data to', set_filename, ':', e)

        return dataset_names
Project: drowsy_detection | Author: thandongtb
def save_train_and_test_set(dataset, labels, ratio, pickle_file):
    split = int(len(dataset) * ratio)
    train_dataset = dataset[:split]
    train_labels = labels[:split]
    test_dataset = dataset[split:]
    test_labels = labels[split:]

    try:
        f = open(pickle_file, 'wb')
        save = {
            'train_dataset': train_dataset,
            'train_labels': train_labels,
            'test_dataset': test_dataset,
            'test_labels': test_labels,
        }
        pickle.dump(save, f, pickle.HIGHEST_PROTOCOL)
        f.close()
    except Exception as e:
        print('Unable to save data to', pickle_file, ':', e)
        raise

    statinfo = os.stat(pickle_file)
    print('Compressed pickle size:', statinfo.st_size)

# Main
Project: udacity-deep-learning | Author: hankcs
def maybe_pickle(data_folders, min_num_images_per_class, force=False):
    dataset_names = []
    for folder in data_folders:
        set_filename = folder + '.pickle'
        dataset_names.append(set_filename)
        if os.path.exists(set_filename) and not force:
            # You may override by setting force=True.
            print('%s already present - Skipping pickling.' % set_filename)
        else:
            print('Pickling %s.' % set_filename)
            dataset = load_letter(folder, min_num_images_per_class)
            try:
                with open(set_filename, 'wb') as f:
                    pickle.dump(dataset, f, pickle.HIGHEST_PROTOCOL)
            except Exception as e:
                print('Unable to save data to', set_filename, ':', e)

    return dataset_names
Project: vuln | Author: mikaelkall
def save_snapshot(self, filename=None):
        """
        Save a snapshot of current process to file
        Warning: this is not thread safe, do not use with multithread program

        Args:
            - filename: target file to save snapshot

        Returns:
            - Bool
        """
        if not filename:
            filename = self.get_config_filename("snapshot")

        snapshot = self.take_snapshot()
        if not snapshot:
            return False
        # dump to file
        fd = open(filename, "wb")
        pickle.dump(snapshot, fd, pickle.HIGHEST_PROTOCOL)
        fd.close()

        return True
Project: udacity-deep-learning | Author: runhani
def maybe_pickle(data_folders, min_num_images_per_class, force=False):
  dataset_names = []
  for folder in data_folders:
    set_filename = folder + '.pickle'
    dataset_names.append(set_filename)
    if os.path.exists(set_filename) and not force:
      # You may override by setting force=True.
      print('%s already present - Skipping pickling.' % set_filename)
    else:
      print('Pickling %s.' % set_filename)
      dataset = load_letter(folder, min_num_images_per_class)
      try:
        with open(set_filename, 'wb') as f:
          pickle.dump(dataset, f, pickle.HIGHEST_PROTOCOL)
      except Exception as e:
        print('Unable to save data to', set_filename, ':', e)

  return dataset_names
Project: streetview | Author: ydnaandy123
def read_dataset(data_dir):
    pickle_filename = "MITSceneParsing.pickle"
    pickle_filepath = os.path.join(data_dir, pickle_filename)
    if not os.path.exists(pickle_filepath):
        utils.maybe_download_and_extract(data_dir, DATA_URL, is_zipfile=True)
        SceneParsing_folder = os.path.splitext(DATA_URL.split("/")[-1])[0]
        result = create_image_lists(os.path.join(data_dir, SceneParsing_folder))
        print ("Pickling ...")
        with open(pickle_filepath, 'wb') as f:
            pickle.dump(result, f, pickle.HIGHEST_PROTOCOL)
    else:
        print ("Found pickle file!")

    with open(pickle_filepath, 'rb') as f:
        result = pickle.load(f)
        training_records = result['training']
        validation_records = result['validation']
        del result

    return training_records, validation_records
Project: odin | Author: imito
def test_load_save3(self):
        X = K.placeholder(shape=(None, 28, 28))
        ops = N.Sequence([
            N.Dimshuffle(pattern=(0, 1, 2, 'x')),
            N.Conv(8, (3, 3), strides=(1, 1), pad='same', activation=K.relu),
            K.pool2d,
            N.Flatten(outdim=2),
            N.Dense(64, activation=K.relu),
            N.Dense(10, activation=K.softmax)
        ])
        y = ops(X)
        f1 = K.function(X, y)

        ops_ = cPickle.loads(cPickle.dumps(ops, protocol=cPickle.HIGHEST_PROTOCOL))
        y_ = ops_(X)
        f2 = K.function(X, y_)

        x = np.random.rand(32, 28, 28)
        self.assertEqual(np.sum(f1(x) - f2(x)), 0.)
Project: peda-arm | Author: alset0326
def save_snapshot(self, filename=None):
        """
        Save a snapshot of current process to file
        Warning: this is not thread safe, do not use with multithread program

        Args:
            - filename: target file to save snapshot

        Returns:
            - Bool
        """
        if not filename:
            filename = self.get_config_filename("snapshot")

        snapshot = self.take_snapshot()
        if not snapshot:
            return False
        # dump to file
        fd = open(filename, "wb")
        pickle.dump(snapshot, fd, pickle.HIGHEST_PROTOCOL)
        fd.close()

        return True
Project: Colorization.tensorflow | Author: shekkizh
def read_dataset(data_dir):
    pickle_filename = "lamem.pickle"
    pickle_filepath = os.path.join(data_dir, pickle_filename)
    if not os.path.exists(pickle_filepath):
        utils.maybe_download_and_extract(data_dir, DATA_URL, is_tarfile=True)
        lamem_folder = (DATA_URL.split("/")[-1]).split(os.path.extsep)[0]
        result = {'images': create_image_lists(os.path.join(data_dir, lamem_folder))}
        print ("Pickling ...")
        with open(pickle_filepath, 'wb') as f:
            pickle.dump(result, f, pickle.HIGHEST_PROTOCOL)
    else:
        print ("Found pickle file!")

    with open(pickle_filepath, 'rb') as f:
        result = pickle.load(f)
        training_records = result['images']
        del result

    return training_records
Project: Theano-Deep-learning | Author: GeekLiB
def save_pkl(self):
        """
        Dump this object into its `key_pkl` file.

        May raise a cPickle.PicklingError if such an exception is raised at
        pickle time (in which case a warning is also displayed).

        """
        # Note that writing in binary mode is important under Windows.
        try:
            with open(self.key_pkl, 'wb') as f:
                pickle.dump(self, f, protocol=pickle.HIGHEST_PROTOCOL)
        except pickle.PicklingError:
            _logger.warning("Cache leak due to unpickle-able key data %s",
                            self.keys)
            os.remove(self.key_pkl)
            raise
Project: OpenMDAO | Author: OpenMDAO
def record_metadata_solver(self, recording_requester):
        """
        Record solver metadata.

        Parameters
        ----------
        recording_requester: <Solver>
            The Solver that would like to record its metadata.
        """
        path = recording_requester._system.pathname
        solver_class = type(recording_requester).__name__
        if not path:
            path = 'root'
        id = "{}.{}".format(path, solver_class)

        solver_options = pickle.dumps(recording_requester.options,
                                      pickle.HIGHEST_PROTOCOL)

        with self.con:
            self.con.execute(
                "INSERT INTO solver_metadata(id, solver_options, solver_class) "
                "VALUES(?,?,?)", (id, sqlite3.Binary(solver_options), solver_class))
Project: cxr_classification | Author: harishanand95
def save(self, dataset_filename="CXR_png.pickle", overwrite=False):
        if self._dataset is None:
            print("Dataset is empty. Run load_images before saving.")
            return

        data = {"dataset": self._dataset,
                "labels": self._labels,
                "valid_images_count": self._valid_images_count,
                "width": self._image_width,
                "height": self._image_height,
                "convert_to_gray": self._convert_to_gray,
                "folder": self._folder,
                "test_dataset": self._test_dataset,
                "test_labels": self._test_labels,
                "test_data_size": self._test_data_size}

        if overwrite is True:
            if os.path.isfile(dataset_filename):
                os.remove(dataset_filename)
        try:
            with open(dataset_filename, 'wb') as f:
                pickle.dump(data, f, pickle.HIGHEST_PROTOCOL)
        except Exception as e:
            print('Unable to save data to', dataset_filename, ':', e)
Project: nuts-flow | Author: maet3608
def __rrshift__(self, iterable):
        """
        Return elements in iterable.

        :param iterable iterable: Any iterable
        :return: Generator over same elements as input iterable.
        :rtype: Generator
        """
        if self.path or (self._cachepath and not self._clearcache):
            for e in self.__iter__():
                yield e
        else:
            self._create_cache()
            for i, e in enumerate(iterable):
                with open(self._fpath(i), 'wb') as f:
                    pickle.dump(e, f, pickle.HIGHEST_PROTOCOL)
                yield e
Project: deeplab_v1_tf1.0 | Author: automan000
def main():
    """Extract and save network skeleton with the corresponding weights.

    Raises:
      ImportError: PyCaffe module is not found."""
    args = get_arguments()
    sys.path.append(args.pycaffe_path)
    try:
        import caffe
    except ImportError:
        raise
    # Load net definition.
    net = caffe.Net('./util/deploy.prototxt', args.caffemodel, caffe.TEST)

    # Check the existence of output_dir.
    if not os.path.exists(args.output_dir):
        os.makedirs(args.output_dir)

    # Net skeleton with parameters names and shapes.
    # In TF, the filter shape is as follows: [ks, ks, input_channels, output_channels],
    # while in Caffe it looks like this: [output_channels, input_channels, ks, ks].
    net_skeleton = list() 
    for name, item in net.params.iteritems():
        net_skeleton.append([name + '/w', item[0].data.shape[::-1]]) # See the explanation on filter formats above.
        net_skeleton.append([name + '/b', item[1].data.shape])

    with open(os.path.join(args.output_dir, 'net_skeleton.ckpt'), 'wb') as f:
        cPickle.dump(net_skeleton, f, protocol=cPickle.HIGHEST_PROTOCOL)

    # Net weights. 
    net_weights = dict()
    for name, item in net.params.iteritems():
        net_weights[name + '/w'] = item[0].data.transpose(2, 3, 1, 0) # See the explanation on filter formats above.
        net_weights[name + '/b'] = item[1].data
    with open(os.path.join(args.output_dir,'net_weights.ckpt'), 'wb') as f:
        cPickle.dump(net_weights, f, protocol=cPickle.HIGHEST_PROTOCOL)
    del net, net_skeleton, net_weights
Project: feagen | Author: ianlini
def write_data(self, result_dict):
        for key, val in six.viewitems(result_dict):
            pickle_path = os.path.join(self.pickle_dir, key + ".pkl")
            with SimpleTimer("Writing generated data %s to pickle file" % key,
                             end_in_new_line=False), \
                    open(pickle_path, "wb") as fp:
                cPickle.dump(val, fp, protocol=cPickle.HIGHEST_PROTOCOL)
Project: WassersteinGAN.tensorflow | Author: shekkizh
def read_dataset(data_dir):
    pickle_filename = "celebA.pickle"
    pickle_filepath = os.path.join(data_dir, pickle_filename)
    if not os.path.exists(pickle_filepath):
        # utils.maybe_download_and_extract(data_dir, DATA_URL, is_zipfile=True)
        celebA_folder = os.path.splitext(DATA_URL.split("/")[-1])[0]
        dir_path = os.path.join(data_dir, celebA_folder)
        if not os.path.exists(dir_path):
            print ("CelebA dataset needs to be downloaded and unzipped manually")
            print ("Download from: %s" % DATA_URL)
            raise ValueError("Dataset not found")

        result = create_image_lists(dir_path)
        print ("Training set: %d" % len(result['train']))
        print ("Test set: %d" % len(result['test']))
        print ("Validation set: %d" % len(result['validation']))
        print ("Pickling ...")
        with open(pickle_filepath, 'wb') as f:
            pickle.dump(result, f, pickle.HIGHEST_PROTOCOL)
    else:
        print ("Found pickle file!")

    with open(pickle_filepath, 'rb') as f:
        result = pickle.load(f)
        celebA = CelebA_Dataset(result)
        del result
    return celebA
Project: MIL.pytorch | Author: gujiuxiang
def save_variables(pickle_file_name, var, info, overwrite=False):
    if os.path.exists(pickle_file_name) and overwrite == False:
        raise Exception('{:s} exists and over write is false.'.format(pickle_file_name))
    # Construct the dictionary
    assert (type(var) == list)
    assert (type(info) == list)
    d = {}
    for i in xrange(len(var)):
        d[info[i]] = var[i]
    with open(pickle_file_name, 'wb') as f:
        cPickle.dump(d, f, cPickle.HIGHEST_PROTOCOL)
Project: pybel | Author: pybel
def to_bytes(graph, protocol=HIGHEST_PROTOCOL):
    """Converts a graph to bytes with pickle. Note that the pickle module has some incompatibilities between Python
    2 and 3. To export a universally importable pickle, choose 0, 1, or 2.

    :param BELGraph graph: A BEL network
    :param int protocol: Pickling protocol to use
    :return: Pickled bytes representing the graph
    :rtype: bytes

    .. seealso:: https://docs.python.org/3.6/library/pickle.html#data-stream-format
    """
    raise_for_not_bel(graph)
    return dumps(graph, protocol=protocol)
Project: pybel | Author: pybel
def to_pickle(graph, file, protocol=HIGHEST_PROTOCOL):
    """Writes this graph to a pickle object with :func:`networkx.write_gpickle`.  Note that the pickle module has some
    incompatibilities between Python 2 and 3. To export a universally importable pickle, choose 0, 1, or 2.

    :param BELGraph graph: A BEL graph
    :param file: A file or filename to write to
    :param int protocol: Pickling protocol to use

    .. seealso:: https://docs.python.org/3.6/library/pickle.html#data-stream-format
    """
    raise_for_not_bel(graph)
    write_gpickle(graph, file, protocol=protocol)
Project: tensorflow-deeplab-lfov | Author: DrSleep
def main():
    """Extract and save network skeleton with the corresponding weights.

    Raises:
      ImportError: PyCaffe module is not found."""
    args = get_arguments()
    sys.path.append(args.pycaffe_path)
    try:
        import caffe
    except ImportError:
        raise
    # Load net definition.
    net = caffe.Net('./util/deploy.prototxt', args.caffemodel, caffe.TEST)

    # Check the existence of output_dir.
    if not os.path.exists(args.output_dir):
        os.makedirs(args.output_dir)

    # Net skeleton with parameters names and shapes.
    # In TF, the filter shape is as follows: [ks, ks, input_channels, output_channels],
    # while in Caffe it looks like this: [output_channels, input_channels, ks, ks].
    net_skeleton = list() 
    for name, item in net.params.iteritems():
        net_skeleton.append([name + '/w', item[0].data.shape[::-1]]) # See the explanation on filter formats above.
        net_skeleton.append([name + '/b', item[1].data.shape])

    with open(os.path.join(args.output_dir, 'net_skeleton.ckpt'), 'wb') as f:
        cPickle.dump(net_skeleton, f, protocol=cPickle.HIGHEST_PROTOCOL)

    # Net weights. 
    net_weights = dict()
    for name, item in net.params.iteritems():
        net_weights[name + '/w'] = item[0].data.transpose(2, 3, 1, 0) # See the explanation on filter formats above.
        net_weights[name + '/b'] = item[1].data
    with open(os.path.join(args.output_dir,'net_weights.ckpt'), 'wb') as f:
        cPickle.dump(net_weights, f, protocol=cPickle.HIGHEST_PROTOCOL)
    del net, net_skeleton, net_weights
Project: serialtime | Author: ianlini
def save_pklgz(obj, path, log_description=None, logger=None,
               logging_level=logging.INFO, verbose_start=True,
               verbose_end=True, end_in_new_line=True, log_prefix="..."):
    if log_description is None:
        log_description = "Pickling to " + (path)
    with SimpleTimer(log_description, logger, logging_level, verbose_start,
                     verbose_end, end_in_new_line, log_prefix):
        pkl = cPickle.dumps(obj, protocol=cPickle.HIGHEST_PROTOCOL)
        with gzip.open(path, "wb") as fp:
            fp.write(pkl)
Project: serialtime | Author: ianlini
def save_pkl(obj, path, log_description=None, logger=None,
             logging_level=logging.INFO, verbose_start=True,
             verbose_end=True, end_in_new_line=True, log_prefix="..."):
    if log_description is None:
        log_description = "Pickling to " + (path)
    with open(path, "wb") as fp, \
            SimpleTimer(log_description, logger, logging_level, verbose_start,
                        verbose_end, end_in_new_line, log_prefix):
        cPickle.dump(obj, fp, protocol=cPickle.HIGHEST_PROTOCOL)
Project: EBGAN.tensorflow | Author: shekkizh
def read_dataset(data_dir):
    pickle_filename = "flowers_data.pickle"
    pickle_filepath = os.path.join(data_dir, pickle_filename)
    if not os.path.exists(pickle_filepath):
        utils.maybe_download_and_extract(data_dir, DATA_URL, is_tarfile=True)
        flower_folder = os.path.splitext(DATA_URL.split("/")[-1])[0]
        result = create_image_lists(os.path.join(data_dir, flower_folder))
        print "Training set: %d" % len(result['train'])
        print "Test set: %d" % len(result['test'])
        print "Validation set: %d" % len(result['validation'])
        print "Pickling ..."
        with open(pickle_filepath, 'wb') as f:
            pickle.dump(result, f, pickle.HIGHEST_PROTOCOL)
    else:
        print "Found pickle file!"

    with open(pickle_filepath, 'rb') as f:
        result = pickle.load(f)
        training_images = result['train']
        testing_images = result['test']
        validation_images = result['validation']

        del result

    print ("Training: %d, Validation: %d, Test: %d" % (
        len(training_images), len(validation_images), len(testing_images)))
    return training_images, testing_images, validation_images
Project: DeepRL-FlappyBird | Author: hashbangCoder
def save_queue(EXPERIENCE_MEMORY):
    with open('saved nets/saved_queue_new.pkl','wb') as f:
        cPickle.dump(EXPERIENCE_MEMORY,f,protocol=cPickle.HIGHEST_PROTOCOL)

    call(['rm','saved nets/saved_queue.pkl'])
    call(['mv','saved nets/saved_queue_new.pkl','saved nets/saved_queue.pkl'])
Project: DeepRL-FlappyBird | Author: hashbangCoder
def save_queue(EXPERIENCE_MEMORY):
    with open('saved_DDQN/double_dqn_queue_new.pkl','wb') as f:
        cPickle.dump(EXPERIENCE_MEMORY,f,protocol=cPickle.HIGHEST_PROTOCOL)

    call(['rm','saved_DDQN/double_dqn_queue.pkl'])
    call(['mv','saved_DDQN/double_dqn_queue_new.pkl','saved_DDQN/double_dqn_queue.pkl'])
Project: DynamicMemoryNetworks | Author: swstarlab
def save_params(self, fname):
        layers = [self.S] + lasagne.layers.get_all_layers(self.A)
        params = chain.from_iterable(l.get_params() for l in layers)
        params = lasagne.utils.unique(params)

        npy_list = [param.get_value(borrow=True) for param in params]

        with open(fname + ".pkl", 'wb') as f:
            pickle.dump(npy_list, f, pickle.HIGHEST_PROTOCOL)
Project: wiki-sem-500 | Author: belph
def save(self, fname):
    """Save a pickled version of the embedding into `fname`."""

    vec = self.vectors
    voc = self.vocabulary.getstate()
    state = (voc, vec)
    with open(fname, 'wb') as f:
      pickle.dump(state, f, protocol=pickle.HIGHEST_PROTOCOL)
Project: chalktalk_docs | Author: loremIpsum1771
def topickle(self, filename):
        # remove unpicklable attributes
        warnfunc = self._warnfunc
        self.set_warnfunc(None)
        values = self.config.values
        del self.config.values
        domains = self.domains
        del self.domains
        picklefile = open(filename, 'wb')
        # remove potentially pickling-problematic values from config
        for key, val in list(vars(self.config).items()):
            if key.startswith('_') or \
               isinstance(val, types.ModuleType) or \
               isinstance(val, types.FunctionType) or \
               isinstance(val, class_types):
                del self.config[key]
        try:
            pickle.dump(self, picklefile, pickle.HIGHEST_PROTOCOL)
        finally:
            picklefile.close()
        # reset attributes
        self.domains = domains
        self.config.values = values
        self.set_warnfunc(warnfunc)

    # --------- ENVIRONMENT INITIALIZATION -------------------------------------
Project: Japan_Mahjong-AI-project | Author: willywsm1013
def save(self,pickle_name):
        print('saving data to ', pickle_name)
        if self.train :
            f = open(pickle_name, 'wb')
            cPickle.dump(self.qValues, f, protocol=cPickle.HIGHEST_PROTOCOL)
            f.close()
Project: Japan_Mahjong-AI-project | Author: willywsm1013
def save(self,pickle_name):
        if self.train :
            print ('saving data to ',pickle_name)
            f = open(pickle_name, 'wb')
            cPickle.dump(self.weights, f, protocol=cPickle.HIGHEST_PROTOCOL)
            f.close()
Project: pt-voicebox | Author: jbrew
def save_object(obj, path):
    """saves an object to a file"""
    with open(path, 'wb') as output:
        pickle.dump(obj, output, pickle.HIGHEST_PROTOCOL)
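The excerpt above only covers saving; a hypothetical companion loader (not part of the project source, added here for illustration) would reverse it with pickle.load:

def load_object(path):
    """loads a pickled object from a file (hypothetical companion to save_object)"""
    with open(path, 'rb') as infile:
        return pickle.load(infile)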
Project: cnn-bnn | Author: jpdz
def file_pickle(pickle_files, save, force):
  if force or not os.path.exists(pickle_files):
    try:
      with open(pickle_files,'wb') as f:
        pickle.dump(save, f, pickle.HIGHEST_PROTOCOL)
    except Exception as e:
        print('Unable to save data to', pickle_files, ':', e)
    return pickle_files
Project: odin | Author: imito
def __getstate__(self):
    if not self._new_args_called:
      raise RuntimeError(
          "You must use argument `protocol=cPickle.HIGHEST_PROTOCOL` "
          "when using `pickle` or `cPickle` to be able pickling NoSQL.")
    self._new_args_called = False
    return self.path, self.read_only, self.cache_size
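The guard above implies a calling convention: something must set `_new_args_called` before `__getstate__` runs, which pickle only does for protocol 2 and above (presumably via this class's `__getnewargs__`). A hedged sketch of the intended call, where `nosql` is a hypothetical instance of the class:

# Hypothetical usage; `nosql` stands in for an instance of the class above.
blob = cPickle.dumps(nosql, protocol=cPickle.HIGHEST_PROTOCOL)  # passes the guard
# cPickle.dumps(nosql)  # omitting the protocol can raise the RuntimeError above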
Project: odin | Author: imito
def __getstate__(self):
    if not self._new_args_called:
      raise RuntimeError(
          "You must use argument `protocol=cPickle.HIGHEST_PROTOCOL` "
          "when using `pickle` or `cPickle` to be able pickling Dataset.")
    self._new_args_called = False
    return self.path, self.read_only
Project: odin | Author: imito
def add_recipes(self, recipes, name, override=False):
    """
    Parameters
    ----------
    """
    # ====== validate arguments ====== #
    if not is_string(name):
      raise ValueError("`name` must be string, but given: %s" % str(type(name)))
    if name in self._saved_recipes and not override:
      raise ValueError("Cannot override pre-defined RECIPE with name: '%s'"
                      % name)
    # ====== validate recipes list ====== #
    if isinstance(recipes, RecipeList):
      recipes = tuple(recipes._recipes)
    else:
      tmp = []
      for rcp in as_tuple(recipes, t=FeederRecipe):
        if isinstance(rcp, RecipeList):
          tmp += list(rcp._recipes)
        else:
          tmp.append(rcp)
      recipes = tuple(tmp)
    # ====== store the recipes to disk ====== #
    path = os.path.join(self.recipe_path, name)
    with open(path, 'wb') as f:
      cPickle.dump(recipes, f, protocol=cPickle.HIGHEST_PROTOCOL)
    # ====== update local recipes list ====== #
    self._saved_recipes[name] = recipes
    return self
Project: odin | Author: imito
def flush(self):
    for dtype, shape, data, path in self._data_map.values():
      if hasattr(data, 'flush'):
        data.flush()
      elif data is not None: # Flush pickling data
        with open(path, 'wb') as f:
          cPickle.dump(data, f, protocol=cPickle.HIGHEST_PROTOCOL)
Project: odin | Author: imito
def func_to_str(func):
  # convert to bytes
  code = cPickle.dumps(array("B", marshal.dumps(func.__code__)),
                       protocol=cPickle.HIGHEST_PROTOCOL)
  closure = None
  if func.__closure__ is not None:
    print("[WARNING] function: %s contains closure, which cannot be "
          "serialized." % str(func))
    closure = tuple([c.cell_contents for c in func.__closure__])
  defaults = func.__defaults__
  return (code, closure, defaults)
Project: odin | Author: imito
def __init__(self, func, *args, **kwargs):
    super(functionable, self).__init__()
    self._function = func
    self.__name__ = self._function.__name__
    try: # sometimes we cannot get the source
      self._source = inspect.getsource(self._function)
    except Exception as e:
      print("[WARNING] Cannot get source code of function:", func,
            "(error:%s)" % str(e))
      self._source = None
    # try to pickle the function directly
    try:
      self._sandbox = cPickle.dumps(self._function,
          protocol=cPickle.HIGHEST_PROTOCOL)
    except Exception:
      self._sandbox = _serialize_function_sandbox(func, self._source)
    # ====== store argsmap ====== #
    argspec = inspect.getargspec(func)
    argsmap = OrderedDict([(i, _ArgPlaceHolder_()) for i in argspec.args])
    # store defaults
    if argspec.defaults is not None:
      for name, arg in zip(argspec.args[::-1], argspec.defaults[::-1]):
        argsmap[name] = arg
    # update positional arguments
    for name, arg in zip(argspec.args, args):
      argsmap[name] = arg
    # update kw arguments
    argsmap.update(kwargs)
    self._argsmap = argsmap

  # ==================== Pickling methods ==================== #
Project: odin | Author: imito
def is_pickleable(x):
  try:
    cPickle.dumps(x, protocol=cPickle.HIGHEST_PROTOCOL)
    return True
  except cPickle.PickleError:
    return False
Project: odin | Author: imito
def __getstate__(self):
    if not self._new_args_called:
      raise RuntimeError(
          "You must use argument `protocol=cPickle.HIGHEST_PROTOCOL` "
          "when using `pickle` or `cPickle` to be able pickling NNOp.")
    self._new_args_called = False
    # add nnops here so all related NNOps are saved
    return self._save_states, self.nnops
Project: GAN | Author: kunrenzhilu
def read_dataset(data_dir):
    pickle_filename = "celebA.pickle"
    pickle_filepath = os.path.join(data_dir, pickle_filename)
    if not os.path.exists(pickle_filepath):
        # utils.maybe_download_and_extract(data_dir, DATA_URL, is_zipfile=True)
        celebA_folder = os.path.splitext(DATA_URL.split("/")[-1])[0]
        dir_path = os.path.join(data_dir, celebA_folder)
        if not os.path.exists(dir_path):
            print ("CelebA dataset needs to be downloaded and unzipped manually")
            print ("Download from: %s" % DATA_URL)
            raise ValueError("Dataset not found")

        result = create_image_lists(dir_path)
        print ("Training set: %d" % len(result['train']))
        print ("Test set: %d" % len(result['test']))
        print ("Validation set: %d" % len(result['validation']))
        print ("Pickling ...")
        with open(pickle_filepath, 'wb') as f:
            pickle.dump(result, f, pickle.HIGHEST_PROTOCOL)
    else:
        print ("Found pickle file!")

    with open(pickle_filepath, 'rb') as f:
        result = pickle.load(f)
        celebA = CelebA_Dataset(result)
        del result
    return celebA
Project: self-critical.pytorch | Author: ruotianluo
def main(params):

  imgs = json.load(open(params['input_json'], 'r'))
  itow = json.load(open(params['dict_json'], 'r'))['ix_to_word']
  wtoi = {w:i for i,w in itow.items()}

  imgs = imgs['images']

  ngram_words, ngram_idxs, ref_len = build_dict(imgs, wtoi, params)

  cPickle.dump({'document_frequency': ngram_words, 'ref_len': ref_len}, open(params['output_pkl']+'-words.p','wb'), protocol=cPickle.HIGHEST_PROTOCOL)
  cPickle.dump({'document_frequency': ngram_idxs, 'ref_len': ref_len}, open(params['output_pkl']+'-idxs.p','wb'), protocol=cPickle.HIGHEST_PROTOCOL)
Project: Colorization.tensorflow | Author: shekkizh
def read_dataset(data_dir):
    pickle_filename = "flowers_data.pickle"
    pickle_filepath = os.path.join(data_dir, pickle_filename)
    if not os.path.exists(pickle_filepath):
        utils.maybe_download_and_extract(data_dir, DATA_URL, is_tarfile=True)
        flower_folder = os.path.splitext(DATA_URL.split("/")[-1])[0]
        result = create_image_lists(os.path.join(data_dir, flower_folder))
        print "Training set: %d" % len(result['train'])
        print "Test set: %d" % len(result['test'])
        print "Validation set: %d" % len(result['validation'])
        print "Pickling ..."
        with open(pickle_filepath, 'wb') as f:
            pickle.dump(result, f, pickle.HIGHEST_PROTOCOL)
    else:
        print "Found pickle file!"

    with open(pickle_filepath, 'rb') as f:
        result = pickle.load(f)
        training_images = result['train']
        testing_images = result['test']
        validation_images = result['validation']

        del result

    print ("Training: %d, Validation: %d, Test: %d" % (
        len(training_images), len(validation_images), len(testing_images)))
    return training_images, testing_images, validation_images