# What’s in TensorFlow Probability? An overview of TensorFlow Probability, a probabilistic programming toolbox that benefits users ranging from data scientists and statisticians to all TensorFlow users.

# Let’s see some examples!

## Linear Mixed Effects Models with Edward2

`import tensorflow as tffrom tensorflow_probability import edward2 as eddef model(features):  # Set up fixed effects and other parameters.  intercept = tf.get_variable("intercept", [])  service_effects = tf.get_variable("service_effects", [])  student_stddev_unconstrained = tf.get_variable(      "student_stddev_pre", [])  instructor_stddev_unconstrained = tf.get_variable(      "instructor_stddev_pre", [])  # Set up random effects.  student_effects = ed.MultivariateNormalDiag(      loc=tf.zeros(num_students),      scale_identity_multiplier=tf.exp(          student_stddev_unconstrained),      name="student_effects")  instructor_effects = ed.MultivariateNormalDiag(      loc=tf.zeros(num_instructors),      scale_identity_multiplier=tf.exp(          instructor_stddev_unconstrained),      name="instructor_effects")  # Set up likelihood given fixed and random effects.  ratings = ed.Normal(      loc=(service_effects * features["service"] +           tf.gather(student_effects, features["students"]) +           tf.gather(instructor_effects, features["instructors"]) +           intercept),      scale=1.,      name="ratings")return ratings`

## Gaussian Copulas with TFP Bijectors

`import tensorflow_probability as tfptfd = tfp.distributionstfb = tfp.distributions.bijectors# Example: Log-Normal Distributionlog_normal = tfd.TransformedDistribution(    distribution=tfd.Normal(loc=0., scale=1.),    bijector=tfb.Exp())# Example: Kumaraswamy DistributionKumaraswamy = tfd.TransformedDistribution(    distribution=tfd.Uniform(low=0., high=1.),    bijector=tfb.Kumaraswamy(        concentration1=2.,        concentration0=2.))# Example: Masked Autoregressive Flow# https://arxiv.org/abs/1705.07057shift_and_log_scale_fn = tfb.masked_autoregressive_default_template(    hidden_layers=[512, 512],    event_shape=[28*28])maf = tfd.TransformedDistribution(    distribution=tfd.Normal(loc=0., scale=1.),         bijector=tfb.MaskedAutoregressiveFlow(        shift_and_log_scale_fn=shift_and_log_scale_fn))`

## Variational Autoencoder with TFP Utilities

`import tensorflow as tfimport tensorflow_probability as tfp# Assumes user supplies `likelihood`, `prior`, `surrogate_posterior`# functions and that each returns a # tf.distribution.Distribution-like object.elbo_loss = tfp.vi.monte_carlo_csiszar_f_divergence(    f=tfp.vi.kl_reverse,  # Equivalent to "Evidence Lower BOund"    p_log_prob=lambda z: likelihood(z).log_prob(x) + prior().log_prob(z),    q=surrogate_posterior(x),    num_draws=1)train = tf.train.AdamOptimizer(    learning_rate=0.01).minimize(elbo_loss)`

## Bayesian Neural Networks with TFP Probabilistic Layers

`import tensorflow as tfimport tensorflow_probability as tfpmodel = tf.keras.Sequential([    tf.keras.layers.Reshape([32, 32, 3]),    tfp.layers.Convolution2DFlipout(        64, kernel_size=5, padding='SAME', activation=tf.nn.relu),    tf.keras.layers.MaxPooling2D(pool_size=[2, 2],                                 strides=[2, 2],                                 padding='SAME'),    tf.keras.layers.Reshape([16 * 16 * 64]),    tfp.layers.DenseFlipout(10)])logits = model(features)neg_log_likelihood = tf.nn.softmax_cross_entropy_with_logits(    labels=labels, logits=logits)kl = sum(model.get_losses_for(inputs=None))loss = neg_log_likelihood + kltrain_op = tf.train.AdamOptimizer().minimize(loss)`
`class MNISTModel(tf.keras.Model):  def __init__(self):    super(MNISTModel, self).__init__()    self.dense1 = tfp.layers.DenseFlipout(units=10)    self.dense2 = tfp.layers.DenseFlipout(units=10)  def call(self, input):    """Run the model."""    result = self.dense1(input)    result = self.dense2(result)    # reuse variables from dense2 layer    result = self.dense2(result)      return resultmodel = MNISTModel()`

# Getting started

`pip install --user --upgrade tfp-nightly`

Written by

Written by

## TensorFlow

#### TensorFlow is a fast, flexible, and scalable open-source machine learning library for research and production. 