| doc_content (string, lengths 1–386k) | doc_id (string, lengths 5–188) |
|---|---|
entropy() [source] | torch.distributions#torch.distributions.lowrank_multivariate_normal.LowRankMultivariateNormal.entropy |
expand(batch_shape, _instance=None) [source] | torch.distributions#torch.distributions.lowrank_multivariate_normal.LowRankMultivariateNormal.expand |
has_rsample = True | torch.distributions#torch.distributions.lowrank_multivariate_normal.LowRankMultivariateNormal.has_rsample |
log_prob(value) [source] | torch.distributions#torch.distributions.lowrank_multivariate_normal.LowRankMultivariateNormal.log_prob |
property mean | torch.distributions#torch.distributions.lowrank_multivariate_normal.LowRankMultivariateNormal.mean |
precision_matrix [source] | torch.distributions#torch.distributions.lowrank_multivariate_normal.LowRankMultivariateNormal.precision_matrix |
rsample(sample_shape=torch.Size([])) [source] | torch.distributions#torch.distributions.lowrank_multivariate_normal.LowRankMultivariateNormal.rsample |
scale_tril [source] | torch.distributions#torch.distributions.lowrank_multivariate_normal.LowRankMultivariateNormal.scale_tril |
support = IndependentConstraint(Real(), 1) | torch.distributions#torch.distributions.lowrank_multivariate_normal.LowRankMultivariateNormal.support |
variance [source] | torch.distributions#torch.distributions.lowrank_multivariate_normal.LowRankMultivariateNormal.variance |
class torch.distributions.mixture_same_family.MixtureSameFamily(mixture_distribution, component_distribution, validate_args=None) [source]
Bases: torch.distributions.distribution.Distribution The MixtureSameFamily distribution implements a (batch of) mixture distribution where all components are from different paramet... | torch.distributions#torch.distributions.mixture_same_family.MixtureSameFamily |
arg_constraints: Dict[str, torch.distributions.constraints.Constraint] = {} | torch.distributions#torch.distributions.mixture_same_family.MixtureSameFamily.arg_constraints |
cdf(x) [source] | torch.distributions#torch.distributions.mixture_same_family.MixtureSameFamily.cdf |
property component_distribution | torch.distributions#torch.distributions.mixture_same_family.MixtureSameFamily.component_distribution |
expand(batch_shape, _instance=None) [source] | torch.distributions#torch.distributions.mixture_same_family.MixtureSameFamily.expand |
has_rsample = False | torch.distributions#torch.distributions.mixture_same_family.MixtureSameFamily.has_rsample |
log_prob(x) [source] | torch.distributions#torch.distributions.mixture_same_family.MixtureSameFamily.log_prob |
property mean | torch.distributions#torch.distributions.mixture_same_family.MixtureSameFamily.mean |
property mixture_distribution | torch.distributions#torch.distributions.mixture_same_family.MixtureSameFamily.mixture_distribution |
sample(sample_shape=torch.Size([])) [source] | torch.distributions#torch.distributions.mixture_same_family.MixtureSameFamily.sample |
property support | torch.distributions#torch.distributions.mixture_same_family.MixtureSameFamily.support |
property variance | torch.distributions#torch.distributions.mixture_same_family.MixtureSameFamily.variance |
class torch.distributions.multinomial.Multinomial(total_count=1, probs=None, logits=None, validate_args=None) [source]
Bases: torch.distributions.distribution.Distribution Creates a Multinomial distribution parameterized by total_count and either probs or logits (but not both). The innermost dimension of probs indexe... | torch.distributions#torch.distributions.multinomial.Multinomial |
arg_constraints = {'logits': IndependentConstraint(Real(), 1), 'probs': Simplex()} | torch.distributions#torch.distributions.multinomial.Multinomial.arg_constraints |
expand(batch_shape, _instance=None) [source] | torch.distributions#torch.distributions.multinomial.Multinomial.expand |
property logits | torch.distributions#torch.distributions.multinomial.Multinomial.logits |
log_prob(value) [source] | torch.distributions#torch.distributions.multinomial.Multinomial.log_prob |
property mean | torch.distributions#torch.distributions.multinomial.Multinomial.mean |
property param_shape | torch.distributions#torch.distributions.multinomial.Multinomial.param_shape |
property probs | torch.distributions#torch.distributions.multinomial.Multinomial.probs |
sample(sample_shape=torch.Size([])) [source] | torch.distributions#torch.distributions.multinomial.Multinomial.sample |
property support | torch.distributions#torch.distributions.multinomial.Multinomial.support |
total_count: int = None | torch.distributions#torch.distributions.multinomial.Multinomial.total_count |
property variance | torch.distributions#torch.distributions.multinomial.Multinomial.variance |
class torch.distributions.multivariate_normal.MultivariateNormal(loc, covariance_matrix=None, precision_matrix=None, scale_tril=None, validate_args=None) [source]
Bases: torch.distributions.distribution.Distribution Creates a multivariate normal (also called Gaussian) distribution parameterized by a mean vector and a... | torch.distributions#torch.distributions.multivariate_normal.MultivariateNormal |
arg_constraints = {'covariance_matrix': PositiveDefinite(), 'loc': IndependentConstraint(Real(), 1), 'precision_matrix': PositiveDefinite(), 'scale_tril': LowerCholesky()} | torch.distributions#torch.distributions.multivariate_normal.MultivariateNormal.arg_constraints |
covariance_matrix [source] | torch.distributions#torch.distributions.multivariate_normal.MultivariateNormal.covariance_matrix |
entropy() [source] | torch.distributions#torch.distributions.multivariate_normal.MultivariateNormal.entropy |
expand(batch_shape, _instance=None) [source] | torch.distributions#torch.distributions.multivariate_normal.MultivariateNormal.expand |
has_rsample = True | torch.distributions#torch.distributions.multivariate_normal.MultivariateNormal.has_rsample |
log_prob(value) [source] | torch.distributions#torch.distributions.multivariate_normal.MultivariateNormal.log_prob |
property mean | torch.distributions#torch.distributions.multivariate_normal.MultivariateNormal.mean |
precision_matrix [source] | torch.distributions#torch.distributions.multivariate_normal.MultivariateNormal.precision_matrix |
rsample(sample_shape=torch.Size([])) [source] | torch.distributions#torch.distributions.multivariate_normal.MultivariateNormal.rsample |
scale_tril [source] | torch.distributions#torch.distributions.multivariate_normal.MultivariateNormal.scale_tril |
support = IndependentConstraint(Real(), 1) | torch.distributions#torch.distributions.multivariate_normal.MultivariateNormal.support |
property variance | torch.distributions#torch.distributions.multivariate_normal.MultivariateNormal.variance |
class torch.distributions.negative_binomial.NegativeBinomial(total_count, probs=None, logits=None, validate_args=None) [source]
Bases: torch.distributions.distribution.Distribution Creates a Negative Binomial distribution, i.e. distribution of the number of successful independent and identical Bernoulli trials before... | torch.distributions#torch.distributions.negative_binomial.NegativeBinomial |
arg_constraints = {'logits': Real(), 'probs': HalfOpenInterval(lower_bound=0.0, upper_bound=1.0), 'total_count': GreaterThanEq(lower_bound=0)} | torch.distributions#torch.distributions.negative_binomial.NegativeBinomial.arg_constraints |
expand(batch_shape, _instance=None) [source] | torch.distributions#torch.distributions.negative_binomial.NegativeBinomial.expand |
logits [source] | torch.distributions#torch.distributions.negative_binomial.NegativeBinomial.logits |
log_prob(value) [source] | torch.distributions#torch.distributions.negative_binomial.NegativeBinomial.log_prob |
property mean | torch.distributions#torch.distributions.negative_binomial.NegativeBinomial.mean |
property param_shape | torch.distributions#torch.distributions.negative_binomial.NegativeBinomial.param_shape |
probs [source] | torch.distributions#torch.distributions.negative_binomial.NegativeBinomial.probs |
sample(sample_shape=torch.Size([])) [source] | torch.distributions#torch.distributions.negative_binomial.NegativeBinomial.sample |
support = IntegerGreaterThan(lower_bound=0) | torch.distributions#torch.distributions.negative_binomial.NegativeBinomial.support |
property variance | torch.distributions#torch.distributions.negative_binomial.NegativeBinomial.variance |
class torch.distributions.normal.Normal(loc, scale, validate_args=None) [source]
Bases: torch.distributions.exp_family.ExponentialFamily Creates a normal (also called Gaussian) distribution parameterized by loc and scale. Example: >>> m = Normal(torch.tensor([0.0]), torch.tensor([1.0]))
>>> m.sample() # normally dis... | torch.distributions#torch.distributions.normal.Normal |
arg_constraints = {'loc': Real(), 'scale': GreaterThan(lower_bound=0.0)} | torch.distributions#torch.distributions.normal.Normal.arg_constraints |
cdf(value) [source] | torch.distributions#torch.distributions.normal.Normal.cdf |
entropy() [source] | torch.distributions#torch.distributions.normal.Normal.entropy |
expand(batch_shape, _instance=None) [source] | torch.distributions#torch.distributions.normal.Normal.expand |
has_rsample = True | torch.distributions#torch.distributions.normal.Normal.has_rsample |
icdf(value) [source] | torch.distributions#torch.distributions.normal.Normal.icdf |
log_prob(value) [source] | torch.distributions#torch.distributions.normal.Normal.log_prob |
property mean | torch.distributions#torch.distributions.normal.Normal.mean |
rsample(sample_shape=torch.Size([])) [source] | torch.distributions#torch.distributions.normal.Normal.rsample |
sample(sample_shape=torch.Size([])) [source] | torch.distributions#torch.distributions.normal.Normal.sample |
property stddev | torch.distributions#torch.distributions.normal.Normal.stddev |
support = Real() | torch.distributions#torch.distributions.normal.Normal.support |
property variance | torch.distributions#torch.distributions.normal.Normal.variance |
class torch.distributions.one_hot_categorical.OneHotCategorical(probs=None, logits=None, validate_args=None) [source]
Bases: torch.distributions.distribution.Distribution Creates a one-hot categorical distribution parameterized by probs or logits. Samples are one-hot coded vectors of size probs.size(-1). Note The pr... | torch.distributions#torch.distributions.one_hot_categorical.OneHotCategorical |
arg_constraints = {'logits': IndependentConstraint(Real(), 1), 'probs': Simplex()} | torch.distributions#torch.distributions.one_hot_categorical.OneHotCategorical.arg_constraints |
entropy() [source] | torch.distributions#torch.distributions.one_hot_categorical.OneHotCategorical.entropy |
enumerate_support(expand=True) [source] | torch.distributions#torch.distributions.one_hot_categorical.OneHotCategorical.enumerate_support |
expand(batch_shape, _instance=None) [source] | torch.distributions#torch.distributions.one_hot_categorical.OneHotCategorical.expand |
has_enumerate_support = True | torch.distributions#torch.distributions.one_hot_categorical.OneHotCategorical.has_enumerate_support |
property logits | torch.distributions#torch.distributions.one_hot_categorical.OneHotCategorical.logits |
log_prob(value) [source] | torch.distributions#torch.distributions.one_hot_categorical.OneHotCategorical.log_prob |
property mean | torch.distributions#torch.distributions.one_hot_categorical.OneHotCategorical.mean |
property param_shape | torch.distributions#torch.distributions.one_hot_categorical.OneHotCategorical.param_shape |
property probs | torch.distributions#torch.distributions.one_hot_categorical.OneHotCategorical.probs |
sample(sample_shape=torch.Size([])) [source] | torch.distributions#torch.distributions.one_hot_categorical.OneHotCategorical.sample |
support = OneHot() | torch.distributions#torch.distributions.one_hot_categorical.OneHotCategorical.support |
property variance | torch.distributions#torch.distributions.one_hot_categorical.OneHotCategorical.variance |
class torch.distributions.pareto.Pareto(scale, alpha, validate_args=None) [source]
Bases: torch.distributions.transformed_distribution.TransformedDistribution Samples from a Pareto Type 1 distribution. Example: >>> m = Pareto(torch.tensor([1.0]), torch.tensor([1.0]))
>>> m.sample() # sample from a Pareto distributio... | torch.distributions#torch.distributions.pareto.Pareto |
arg_constraints: Dict[str, torch.distributions.constraints.Constraint] = {'alpha': GreaterThan(lower_bound=0.0), 'scale': GreaterThan(lower_bound=0.0)} | torch.distributions#torch.distributions.pareto.Pareto.arg_constraints |
entropy() [source] | torch.distributions#torch.distributions.pareto.Pareto.entropy |
expand(batch_shape, _instance=None) [source] | torch.distributions#torch.distributions.pareto.Pareto.expand |
property mean | torch.distributions#torch.distributions.pareto.Pareto.mean |
property support | torch.distributions#torch.distributions.pareto.Pareto.support |
property variance | torch.distributions#torch.distributions.pareto.Pareto.variance |
class torch.distributions.poisson.Poisson(rate, validate_args=None) [source]
Bases: torch.distributions.exp_family.ExponentialFamily Creates a Poisson distribution parameterized by rate, the rate parameter. Samples are nonnegative integers, with a pmf given by $\mathrm{rate}^k \frac{e^{-\mathrm{rate}}}{k!}$... | torch.distributions#torch.distributions.poisson.Poisson |
arg_constraints = {'rate': GreaterThan(lower_bound=0.0)} | torch.distributions#torch.distributions.poisson.Poisson.arg_constraints |
expand(batch_shape, _instance=None) [source] | torch.distributions#torch.distributions.poisson.Poisson.expand |
log_prob(value) [source] | torch.distributions#torch.distributions.poisson.Poisson.log_prob |
property mean | torch.distributions#torch.distributions.poisson.Poisson.mean |
sample(sample_shape=torch.Size([])) [source] | torch.distributions#torch.distributions.poisson.Poisson.sample |
support = IntegerGreaterThan(lower_bound=0) | torch.distributions#torch.distributions.poisson.Poisson.support |
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.