Python tensorflow module: random_gamma() example source code

We have extracted the following 9 code examples from open-source Python projects to illustrate how tensorflow.random_gamma() is used.
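Before the project examples, here is a minimal standalone sketch of calling tf.random_gamma() directly. It assumes the TensorFlow 1.x API that these snippets target; the alpha, beta, and seed values are purely illustrative.

import tensorflow as tf

# Draw 5 samples each from Gamma(alpha=2.0, beta=3.0) and Gamma(alpha=4.0, beta=3.0).
# The sample shape [5] is prepended to the broadcast shape of alpha and beta,
# so `samples` has shape [5, 2]; beta is the rate (inverse scale).
samples = tf.random_gamma(shape=[5], alpha=[2.0, 4.0], beta=3.0, seed=42)

with tf.Session() as sess:
    print(sess.run(samples))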

Project: zhusuan    Author: thu-ml    | project source code | file source code
def _sample(self, n_samples):
        # Draw n_samples from Gamma(alpha, beta); beta is the rate (inverse scale).
        return tf.random_gamma([n_samples], self.alpha,
                               beta=self.beta, dtype=self.dtype)
Project: zhusuan    Author: thu-ml    | project source code | file source code
def _sample(self, n_samples):
        # Beta(alpha, beta) samples via the Gamma ratio x / (x + y),
        # where x ~ Gamma(alpha, 1) and y ~ Gamma(beta, 1).
        alpha, beta = maybe_explicit_broadcast(
            self.alpha, self.beta, 'alpha', 'beta')
        x = tf.random_gamma([n_samples], alpha, beta=1, dtype=self.dtype)
        y = tf.random_gamma([n_samples], beta, beta=1, dtype=self.dtype)
        return x / (x + y)
Project: zhusuan    Author: thu-ml    | project source code | file source code
def _sample(self, n_samples):
        # Inverse-Gamma samples as the reciprocal of Gamma(alpha, beta) draws.
        gamma = tf.random_gamma([n_samples], self.alpha,
                                beta=self.beta, dtype=self.dtype)
        return 1 / gamma
Project: zhusuan    Author: thu-ml    | project source code | file source code
def _sample(self, n_samples):
        # Dirichlet(alpha) samples by normalising independent Gamma(alpha_i, 1)
        # draws along the last axis.
        samples = tf.random_gamma([n_samples], self.alpha,
                                  beta=1, dtype=self.dtype)
        return samples / tf.reduce_sum(samples, -1, keep_dims=True)
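As a standalone check of the same trick used above (illustrative values; TensorFlow 1.x API assumed), normalising independent Gamma draws produces rows that sum to one, i.e. Dirichlet samples:

import tensorflow as tf

# Each row of `dirichlet` is a Dirichlet(alpha) sample obtained by normalising
# independent Gamma(alpha_i, 1) draws, so every row sums to (approximately) 1.
alpha = tf.constant([0.5, 1.0, 2.0])
gammas = tf.random_gamma([4], alpha, beta=1, seed=0)  # shape [4, 3]
dirichlet = gammas / tf.reduce_sum(gammas, -1, keep_dims=True)

with tf.Session() as sess:
    print(sess.run(tf.reduce_sum(dirichlet, -1)))  # close to [1. 1. 1. 1.]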
Project: aboleth    Author: data61    | project source code | file source code
def _initialise_variables(self, X):
        """Initialise the impute variables."""
        datadim = int(X.shape[2])
        impute_means = tf.Variable(
            tf.random_normal(shape=(1, datadim), seed=next(seedgen)),
            name="impute_scalars"
        )
        impute_stddev = tf.Variable(
            tf.random_gamma(alpha=1., shape=(1, datadim), seed=next(seedgen)),
            name="impute_scalars"
        )
        self.normal = tf.distributions.Normal(
            impute_means,
            tf.sqrt(pos(impute_stddev))
        )
Project: aboleth    Author: data61    | project source code | file source code
def norm_posterior(dim, std0):
    """Initialise a posterior (diagonal) Normal distribution.

    Parameters
    ----------
    dim : tuple or list
        the dimension of this distribution.
    std0 : float
        the initial (unoptimized) standard deviation of this distribution.

    Returns
    -------
    Q : tf.distributions.Normal
        the initialised posterior Normal object.

    Note
    ----
    This will make tf.Variables on the randomly initialised mean and standard
    deviation of the posterior. The initialisation of the mean is from a Normal
    with zero mean, and ``std0`` standard deviation, and the initialisation of
    the standard deviation is from a gamma distribution with an alpha of
    ``std0`` and a beta of 1.

    """
    mu_0 = tf.random_normal(dim, stddev=std0, seed=next(seedgen))
    mu = tf.Variable(mu_0, name="W_mu_q")

    std_0 = tf.random_gamma(alpha=std0, shape=dim, seed=next(seedgen))
    std = pos(tf.Variable(std_0, name="W_std_q"))

    Q = tf.distributions.Normal(loc=mu, scale=std)
    return Q
Project: TerpreT    Author: 51alg    | project source code | file source code
def log_dirichlet(self, size, scale=1.0):
        # Draw a Dirichlet(scale * 1) sample via Gamma normalisation and
        # return it in log space.
        mu = tf.random_gamma([1], scale * np.ones(size).astype(np.float32))
        mu = tf.log(mu / tf.reduce_sum(mu))
        return mu
Project: tensorforce    Author: reinforceio    | project source code | file source code
def tf_sample(self, distr_params, deterministic):
        alpha, beta, alpha_beta, _ = distr_params

        # Deterministic: mean as action
        definite = beta / alpha_beta

        # Non-deterministic: sample action using gamma distribution
        alpha_sample = tf.random_gamma(shape=(), alpha=alpha)
        beta_sample = tf.random_gamma(shape=(), alpha=beta)

        sampled = beta_sample / tf.maximum(x=(alpha_sample + beta_sample), y=util.epsilon)

        return self.min_value + (self.max_value - self.min_value) * \
            tf.where(condition=deterministic, x=definite, y=sampled)
Project: aboleth    Author: data61    | project source code | file source code
def gaus_posterior(dim, std0):
    """Initialise a posterior Gaussian distribution with a diagonal covariance.

    Even though this is initialised with a diagonal covariance, a full
    covariance will be learned, using a lower triangular Cholesky
    parameterisation.

    Parameters
    ----------
    dim : tuple or list
        the dimension of this distribution.
    std0 : float
        the initial (unoptimized) diagonal standard deviation of this
        distribution.

    Returns
    -------
    Q : tf.contrib.distributions.MultivariateNormalTriL
        the initialised posterior Gaussian object.

    Note
    ----
    This will make tf.Variables on the randomly initialised mean and covariance
    of the posterior. The initialisation of the mean is from a Normal with zero
    mean, and ``std0`` standard deviation, and the initialisation of the (lower
    triangular of the) covariance is from a gamma distribution with an alpha of
    ``std0`` and a beta of 1.

    """
    o, i = dim

    # Optimize only values in lower triangular
    u, v = np.tril_indices(i)
    indices = (u * i + v)[:, np.newaxis]
    l0 = np.tile(np.eye(i), [o, 1, 1])[:, u, v].T
    l0 = l0 * tf.random_gamma(alpha=std0, shape=l0.shape, seed=next(seedgen))
    lflat = tf.Variable(l0, name="W_cov_q")
    Lt = tf.transpose(tf.scatter_nd(indices, lflat, shape=(i * i, o)))
    L = tf.reshape(Lt, (o, i, i))

    mu_0 = tf.random_normal((o, i), stddev=std0, seed=next(seedgen))
    mu = tf.Variable(mu_0, name="W_mu_q")
    Q = MultivariateNormalTriL(mu, L)
    return Q

