# This cell can take ~12 minutes to run in Graph mode.
# Draws posterior samples for (logC, m) with Hamiltonian Monte Carlo.

# Number of retained samples and burn-in (discarded warm-up) steps for the MCMC sampler.
samples = 10000
burnin = 10000

# Initial state for the HMC chain: [logC, m] both start at 0.
initial_state = [0., 0.]

# Convert the raw data into float32 tensors so they can feed the TF graph.
cycles = tf.convert_to_tensor(t_, tf.float32)
observations = tf.convert_to_tensor(y_, tf.float32)
y0 = tf.convert_to_tensor(y_[0], tf.float32)

# Target: the unnormalized joint log-probability with the data closed over,
# leaving only the free parameters (logC, m) as arguments for the sampler.
unormalized_target_posterior = lambda *args: joint_log_prob(cycles, observations, y0, *args)

# Set up and run the MCMC sampler. step_size and num_leapfrog_steps were
# presumably hand-tuned for a reasonable acceptance rate — TODO confirm.
[logC_samples, m_samples], kernel_results = tfp.mcmc.sample_chain(
    num_results=samples,
    num_burnin_steps=burnin,
    current_state=initial_state,
    kernel=tfp.mcmc.HamiltonianMonteCarlo(
        target_log_prob_fn=unormalized_target_posterior,
        step_size=0.045,
        num_leapfrog_steps=6))

# Track the acceptance rate for the sampled chain.
# NOTE: tf.to_float was removed in TF 2.x; tf.cast is the supported equivalent.
acceptance_rate = tf.reduce_mean(tf.cast(kernel_results.is_accepted, tf.float32))

# Actually run the sampler.
# The evaluate() function, defined at the top of this notebook, runs `sess.run()`
# in graph mode and allows code to be executed eagerly when Eager mode is enabled.
[logC_samples_, m_samples_, acceptance_rate_] = evaluate([
    logC_samples, m_samples, acceptance_rate])

# Some initial results
print('acceptance_rate:', acceptance_rate_)