The following 10 code examples, extracted from open-source Python projects, illustrate how to use scipy.stats.poisson().
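Before the project snippets, here is a minimal, self-contained sketch of the scipy.stats.poisson() API itself; the mean of 3.0 and the sample size are arbitrary values chosen for illustration:

from scipy.stats import poisson

rv = poisson(3.0)                 # frozen Poisson distribution with mean mu = 3.0
print(rv.mean(), rv.var())        # for a Poisson, mean == variance == mu
print(rv.pmf(2))                  # P(X = 2)
print(rv.cdf(5))                  # P(X <= 5)
samples = rv.rvs(size=1000, random_state=0)   # draw 1000 integer-valued samples
print(samples[:10])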
def setUp(self):
    '''
    Saves the current random state for later recovery, sets the random seed
    to get reproducible results and manually constructs a mixed vine.
    '''
    # Save random state for later recovery
    self.random_state = np.random.get_state()
    # Set fixed random seed
    np.random.seed(0)
    # Manually construct mixed vine
    self.dim = 3  # Dimension
    self.vine = MixedVine(self.dim)
    # Specify marginals
    self.vine.set_marginal(0, norm(0, 1))
    self.vine.set_marginal(1, poisson(5))
    self.vine.set_marginal(2, gamma(2, 0, 4))
    # Specify pair copulas
    self.vine.set_copula(1, 0, GaussianCopula(0.5))
    self.vine.set_copula(1, 1, FrankCopula(4))
    self.vine.set_copula(2, 0, ClaytonCopula(5))
def nbinom(self, samples):
    """
    Sampling from a negative binomial distribution
    Parameters:
        mu: Poisson mean
        r:  controls the deviation from the Poisson
    This makes the negative binomial distribution suitable as a robust
    alternative to the Poisson, which approaches the Poisson for large r,
    but which has larger variance than the Poisson for small r.
    ------------------------------------------------------------------------
    - samples: number of values that will be returned.
    """
    mu = float(self.__params[0])
    r = float(self.__params[1])
    p = (r * 1.0) / (r + mu)
    distro = nbinom(r, p)
    f = distro.rvs(size=samples)
    return f
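A quick sanity check on the parameterization used above: with p = r / (r + mu), scipy.stats.nbinom has mean mu and variance mu + mu**2 / r, which exceeds the Poisson variance mu and approaches it as r grows. A minimal sketch (mu = 4.0 and r = 2.0 are arbitrary):

from scipy.stats import nbinom

mu, r = 4.0, 2.0
p = r / (r + mu)
rv = nbinom(r, p)
print(rv.mean())   # equals mu
print(rv.var())    # equals mu + mu**2 / r, i.e. larger than the Poisson variance mu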
def sample_spatial_poisson_process(self, rate):
    xmin, xmax = self.x_range
    ymin, ymax = self.y_range
    dx = xmax - xmin
    dy = ymax - ymin
    # Number of points follows a Poisson law with mean rate * area
    N = stats.poisson(rate * dx * dy).rvs()
    # Point locations are uniform over the window
    x = stats.uniform.rvs(xmin, dx, size=(N, 1))
    y = stats.uniform.rvs(ymin, dy, size=(N, 1))
    centers = np.hstack((x, y))
    return centers
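The same two-step recipe (draw the point count from a Poisson with mean rate * area, then place the points uniformly in the window) can be sketched without the surrounding class; the rate and window bounds below are arbitrary:

import numpy as np
from scipy import stats

rate = 2.0                                   # expected points per unit area (arbitrary)
xmin, xmax, ymin, ymax = 0.0, 4.0, 0.0, 3.0  # arbitrary rectangular window
area = (xmax - xmin) * (ymax - ymin)

N = stats.poisson(rate * area).rvs()         # number of points in the window
x = stats.uniform.rvs(xmin, xmax - xmin, size=(N, 1))
y = stats.uniform.rvs(ymin, ymax - ymin, size=(N, 1))
centers = np.hstack((x, y))                  # N x 2 array of point locations
print(centers.shape)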
def poissonRnd(scale, size=None):
    result = np.random.poisson(scale, size)
    return result
def value(self, samples=1):
    """
    Samples the given number of values from the specific distribution.
    ------------------------------------------------------------------------
    - samples: number of values that will be returned.
    """
    value = 0
    try:
        for item in self.__params:
            if item == 0:
                break
        if item == 0:
            value = [0] * samples
        else:
            if self.__name == "b":
                value = self.binom(samples)
            if self.__name == "e":
                value = self.exponential(samples)
            if self.__name == "f":
                value = self.fixed(samples)
            if self.__name == "g":
                value = self.gamma(samples)
            if self.__name == "g1":
                value = self.gamma1(samples)
            if self.__name == "ln":
                value = self.lognormal(samples)
            if self.__name == "n":
                value = self.normal(samples)
            if self.__name == "nb":
                value = self.nbinom(samples)
            if self.__name == "p":
                value = self.poisson(samples)
            if self.__name == "u":
                value = self.uniform(samples)
    except Exception as ex:
        exc_type, exc_obj, exc_tb = sys.exc_info()
        fname = os.path.split(exc_tb.tb_frame.f_code.co_filename)[1]
        message = "\n\tUnexpected: {0} | {1} - File: {2} - Line:{3}".format(
            ex, exc_type, fname, exc_tb.tb_lineno)
        raise Exception(message)
    return value
def poisson(self, samples):
    """
    Sampling from a Poisson distribution
    Parameters:
        mean
    ------------------------------------------------------------------------
    - samples: number of values that will be returned.
    """
    l = float(self.__params[0] * 1.0)
    distro = poisson(l)
    f = distro.rvs(size=samples)
    return f
def gen(self, normal_mu_range, anomaly_mu_range):
    self.gens = [
        compound_distribution(
            stats.uniform(loc=anomaly_mu_range[0],
                          scale=anomaly_mu_range[1] - anomaly_mu_range[0]),
            truncated(stats.poisson, max_value=1024)
        ),
        compound_distribution(
            stats.uniform(loc=normal_mu_range[0],
                          scale=normal_mu_range[1] - normal_mu_range[0]),
            truncated(stats.poisson, max_value=1024)
        )
    ]

    self.priors = np.array([0.1, 0.9])

    n = 10
    MC = CameraMC(self.priors, self.gens, image_shape=(1, n, n), n_frames=100)

    self.cats, self.params, self.imgs = MC.get_sample()
    self.hists = ndcount(self.imgs).reshape(n, n, -1)
    self.hists = self.hists.astype('float32') / np.sum(self.hists, axis=2)[:, :, None]
    self.cats = self.cats.reshape(-1)

    print("Img shape %s" % (self.imgs.shape, ))
    print("Hists shape %s" % (self.hists.shape, ))
    print("Categories shape %s" % (self.cats.shape, ))
def gen(self, normal_mu_range, anomaly_mu_range):
    self.gens = [
        compound_distribution(
            stats.uniform(loc=anomaly_mu_range[0],
                          scale=anomaly_mu_range[1] - anomaly_mu_range[0]),
            truncated(stats.poisson, max_value=1024)
        ),
        compound_distribution(
            stats.uniform(loc=normal_mu_range[0],
                          scale=normal_mu_range[1] - normal_mu_range[0]),
            truncated(stats.poisson, max_value=1024)
        )
    ]

    self.priors = np.array([0.1, 0.9])

    n = 100
    m = 10
    bins = 64

    MC = CameraMC(self.priors, self.gens, image_shape=(1, n, ), n_frames=100, max_value=bins)

    X = np.ndarray(shape=(m, n, bins), dtype='float32')
    cats = np.ndarray(shape=(m, n), dtype='float32')

    for i in range(m):
        cats[i], _, imgs = MC.get_sample()
        h = ndcount(imgs, bins=bins)
        print(h.shape)
        h = h.reshape(n, bins)
        X[i] = h.astype('float32') / np.sum(h, axis=1)[:, None]

    print("X shape %s" % (X.shape, ))
    print("Categories shape %s" % (cats.shape, ))

    self.X = X
    self.cats = cats
def __init__(self, parameter_distribution=stats.gamma, signal_family=stats.poisson):
    self.parameter_distribution = parameter_distribution
    self.signal_family = signal_family
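Pairing stats.gamma with stats.poisson as in this constructor is the classical gamma-Poisson mixture: when the Poisson rate is gamma-distributed, the resulting counts follow a negative binomial distribution. A small simulation sketch (shape a = 3.0 and scale = 2.0 are arbitrary) illustrates the connection:

import numpy as np
from scipy import stats

a, scale = 3.0, 2.0

# Draw a rate from the gamma "parameter distribution", then counts from the Poisson "signal family"
rates = stats.gamma(a, scale=scale).rvs(size=100000, random_state=0)
counts = stats.poisson(rates).rvs(random_state=1)

# The mixture matches a negative binomial with n = a and p = 1 / (1 + scale)
nb = stats.nbinom(a, 1.0 / (1.0 + scale))
print(counts.mean(), nb.mean())   # both close to a * scale = 6
print(counts.var(), nb.var())     # both close to a * scale * (1 + scale) = 18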
def fit(samples, is_continuous):
    '''
    Fits a distribution to the given samples.

    Parameters
    ----------
    samples : array_like
        Array of samples.
    is_continuous : bool
        If `True` then a continuous distribution is fitted.  Otherwise,
        a discrete distribution is fitted.

    Returns
    -------
    best_marginal : Marginal
        The distribution fitted to `samples`.
    '''
    # Mean and variance
    mean = np.mean(samples)
    var = np.var(samples)
    # Set suitable distributions
    if is_continuous:
        if np.any(samples <= 0):
            options = [norm]
        else:
            options = [norm, gamma]
    else:
        if var > mean:
            options = [poisson, binom, nbinom]
        else:
            options = [poisson, binom]
    params = np.empty(len(options), dtype=object)
    marginals = np.empty(len(options), dtype=object)
    # Fit parameters and construct marginals
    for i, dist in enumerate(options):
        if dist == poisson:
            params[i] = [mean]
        elif dist == binom:
            param_n = np.max(samples)
            param_p = np.sum(samples) / (param_n * len(samples))
            params[i] = [param_n, param_p]
        elif dist == nbinom:
            param_n = mean * mean / (var - mean)
            param_p = mean / var
            params[i] = [param_n, param_p]
        else:
            params[i] = dist.fit(samples)
        rv_mixed = dist(*params[i])
        marginals[i] = Marginal(rv_mixed)
    # Calculate the Akaike information criterion
    aic = np.zeros(len(options))
    for i, marginal in enumerate(marginals):
        aic[i] = 2 * len(params[i]) \
            - 2 * np.sum(marginal.logpdf(samples))
    best_marginal = marginals[np.argmin(aic)]
    return best_marginal
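The selection step above compares candidate distributions by the Akaike information criterion, AIC = 2k - 2 log L. The idea can be sketched in a stripped-down form that uses scipy.stats directly instead of the Marginal wrapper (logpmf is used here for the discrete candidates, and the moment-based parameter estimates mirror the snippet above):

import numpy as np
from scipy.stats import poisson, nbinom

samples = poisson(5).rvs(size=500, random_state=0)
mean, var = samples.mean(), samples.var()

# Candidate distributions with moment-based parameter estimates
candidates = {'poisson': (poisson, [mean])}
if var > mean:
    candidates['nbinom'] = (nbinom, [mean * mean / (var - mean), mean / var])

# AIC = 2 * (number of parameters) - 2 * log-likelihood
aic = {}
for name, (dist, params) in candidates.items():
    loglik = np.sum(dist(*params).logpmf(samples))
    aic[name] = 2 * len(params) - 2 * loglik

print(min(aic, key=aic.get), aic)   # the candidate with the lowest AIC wins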