We extracted the following 8 code examples from open-source Python projects to illustrate how to use keras.backend.random_binomial(). All of the examples assume the conventional backend import, from keras import backend as K.
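Before the extracted examples, here is a minimal standalone sketch of the function itself (assuming a Keras 2.x installation, where keras.backend.random_binomial(shape, p, dtype, seed) draws independent Bernoulli samples):

from keras import backend as K

# Draw a 2x3 mask of independent Bernoulli(0.3) samples; entries are 0.0 or 1.0.
mask = K.random_binomial((2, 3), p=0.3, seed=42)
print(K.eval(mask))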
Example 1

def call(self, x, mask=None):
    if self.mode == 'maximum_likelihood':
        # draw maximum likelihood sample from Bernoulli distribution
        #   x* = argmax_x p(x) = 1    if p(x=1) >= 0.5
        #                        0    otherwise
        return K.round(x)
    elif self.mode == 'random':
        # draw random sample from Bernoulli distribution
        #   x* = x ~ p(x) = 1    if p(x=1) > uniform(0, 1)
        #                   0    otherwise
        #return self.srng.binomial(size=x.shape, n=1, p=x, dtype=K.floatx())
        return K.random_binomial(x.shape, p=x, dtype=K.floatx())
    elif self.mode == 'mean_field':
        # draw mean-field approximation sample from Bernoulli distribution
        #   x* = E[p(x)] = E[Bern(x; p)] = p
        return x
    elif self.mode == 'nrlu':
        return nrlu(x)
    else:
        raise NotImplementedError('Unknown sample mode!')
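The three Bernoulli sampling strategies in Example 1 can be compared directly on a small probability tensor. A minimal sketch (the tensor values are illustrative, not from the original project):

from keras import backend as K

p = K.constant([[0.2, 0.8, 0.6]])  # per-unit Bernoulli probabilities

print(K.eval(K.round(p)))                          # 'maximum_likelihood': deterministic threshold -> [[0., 1., 1.]]
print(K.eval(K.random_binomial(K.shape(p), p=p)))  # 'random': one stochastic 0/1 draw per unit
print(K.eval(p))                                   # 'mean_field': the probabilities pass through unchanged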
Example 2

def sample_h_given_x(self, x):
    h_pre = K.dot(x, self.Wrbm) + self.bh
    h_sigm = self.activation(self.scaling_h_given_x * h_pre)

    # drop out noise
    #if(0.0 < self.p < 1.0):
    #    noise_shape = self._get_noise_shape(h_sigm)
    #    h_sigm = K.in_train_phase(K.dropout(h_sigm, self.p, noise_shape), h_sigm)

    if(self.hidden_unit_type == 'binary'):
        # random sample
        #   \hat{h} = 1    if p(h=1|x) > uniform(0, 1)
        #             0    otherwise
        h_samp = K.random_binomial(shape=h_sigm.shape, p=h_sigm)
    elif(self.hidden_unit_type == 'nrlu'):
        h_samp = nrlu(h_pre)
    else:
        h_samp = h_sigm

    if(0.0 < self.p < 1.0):
        noise_shape = self._get_noise_shape(h_samp)
        h_samp = K.in_train_phase(K.dropout(h_samp, self.p, noise_shape), h_samp)

    return h_samp, h_pre, h_sigm
Example 3

def _random_arr(self, count, p):
    return K.random_binomial((count,), p=p)
Example 4

def _build_global_switch(self):
    # A randomly sampled tensor that will signal if the batch
    # should use global or local droppath
    return K.equal(K.random_binomial((), p=self.global_p, seed=self.switch_seed), 1.)
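Because the shape argument in Example 4 is the empty tuple, the draw is a single scalar, and comparing it against 1. yields a boolean switch. A minimal sketch of the same idea outside the class (the probability 0.8 is an arbitrary stand-in for self.global_p):

from keras import backend as K

# One Bernoulli(0.8) draw; K.equal turns the 0./1. sample into a boolean scalar.
switch = K.equal(K.random_binomial((), p=0.8), 1.)
print(K.eval(switch))  # True on roughly 80% of evaluations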
Example 5

def call(self, inputs, training=None):
    def noised():
        return inputs * K.random_binomial(shape=K.shape(inputs), p=self.ratio)
    return K.in_train_phase(noised, inputs, training=training)
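Example 5 shows only the call() method of its layer. A self-contained version of the same multiplicative-Bernoulli-noise pattern might look like the sketch below; the class name MultiplicativeNoise and the ratio default are assumptions made here for illustration, not part of the original project:

from keras import backend as K
from keras.layers import Layer

class MultiplicativeNoise(Layer):
    """Multiplies inputs elementwise by a Bernoulli(ratio) mask at training time.

    NOTE: hypothetical layer, sketched from Example 5 above.
    """

    def __init__(self, ratio=0.5, **kwargs):
        super(MultiplicativeNoise, self).__init__(**kwargs)
        self.ratio = ratio

    def call(self, inputs, training=None):
        def noised():
            return inputs * K.random_binomial(shape=K.shape(inputs), p=self.ratio)
        # Identity at inference time; the mask is applied only while training.
        return K.in_train_phase(noised, inputs, training=training)

    def compute_output_shape(self, input_shape):
        return input_shape

Note that, unlike K.dropout, the surviving activations are not rescaled by 1/ratio, so train-time and test-time activations differ in expectation unless the caller compensates.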
Example 6

def sample_x_given_h(self, h):
    x_pre = K.dot(h, self.Wrbm.T) + self.bx
    if(self.visible_unit_type == 'gaussian'):
        x_samp = self.scaling_x_given_h * x_pre
        return x_samp, x_samp, x_samp
    else:
        x_sigm = K.sigmoid(self.scaling_x_given_h * x_pre)
        x_samp = K.random_binomial(shape=x_sigm.shape, p=x_sigm)
        return x_samp, x_pre, x_sigm
Example 7

def _get_sampler_by_string(self, loss):
    output = self.outputs[0]
    inputs = self.inputs

    if loss in ["MSE", "mse", "mean_squared_error"]:
        output += samplers.random_normal(K.shape(output), mean=0.0, std=1.0)
        draw_sample = K.function(inputs + [K.learning_phase()], [output])

        def sample_gaussian(inputs, use_dropout=False):
            '''Helper to draw samples from a gaussian distribution'''
            return draw_sample(inputs + [int(use_dropout)])[0]

        return sample_gaussian

    elif loss == "binary_crossentropy":
        output = K.random_binomial(K.shape(output), p=output)
        draw_sample = K.function(inputs + [K.learning_phase()], [output])

        def sample_binomial(inputs, use_dropout=False):
            '''Helper to draw samples from a binomial distribution'''
            return draw_sample(inputs + [int(use_dropout)])[0]

        return sample_binomial

    elif loss in ["mean_absolute_error", "mae", "MAE"]:
        output += samplers.random_laplace(K.shape(output), mu=0.0, b=1.0)
        draw_sample = K.function(inputs + [K.learning_phase()], [output])

        def sample_laplace(inputs, use_dropout=False):
            '''Helper to draw samples from a Laplacian distribution'''
            return draw_sample(inputs + [int(use_dropout)])[0]

        return sample_laplace

    elif loss == "mixture_of_gaussians":
        pi, mu, log_sig = densities.split_mixture_of_gaussians(output, self.n_components)
        samples = samplers.random_gmm(pi, mu, K.exp(log_sig))
        draw_sample = K.function(inputs + [K.learning_phase()], [samples])
        return lambda inputs, use_dropout: draw_sample(inputs + [int(use_dropout)])[0]

    else:
        raise NotImplementedError("Unrecognised loss: %s. "
                                  "Cannot build a generic sampler" % loss)
Example 8

def call(self, x, mask=None):
    if isinstance(x, list):
        x, _ = x
    if mask is not None and isinstance(mask, list):
        mask, _ = mask

    if 0. < self.dropout < 1.:
        retain_p = 1. - self.dropout
        dims = self.W._keras_shape[:-1]
        B = K.random_binomial(dims, p=retain_p) * (1. / retain_p)
        B = K.expand_dims(B)
        W = K.in_train_phase(self.W * B, self.W)
    else:
        W = self.W

    if self.mode == 'matrix':
        return K.gather(W, x)
    elif self.mode == 'tensor':
        # quick and dirty: only allowing for 3dim inputs when it's tensor mode
        assert K.ndim(x) == 3
        # put sequence on first; gather; take diagonal across shared batch dimension
        # in other words, W is (B, S, F) and the incoming x is (B, S, A)
        inds = K.arange(self.W._keras_shape[0])
        # tensor abc goes to bac, indexed onto with xyz, goes to xyzac;
        # x == a, so reshape to xayzc == xxyzc,
        # then take the diagonal on the first two axes: xyzc
        out = K.gather(K.permute_dimensions(W, (1, 0, 2)), x)
        out = K.permute_dimensions(out, (0, 3, 1, 2, 4))
        out = K.gather(out, (inds, inds))
        return out
    else:
        raise Exception('sanity check. should not be here.')