|
| 1 | +# -*- coding: utf-8 -*- |
| 2 | +""" |
| 3 | +Created on Wed Oct 09 12:08:57 2013 |
| 4 | +
|
| 5 | +@author: AlbertoEAF |
| 6 | +""" |
| 7 | + |
| 8 | +from methods import * |
| 9 | + |
| 10 | + |
| 11 | + |
#sigma = 0.1
#beta = 1.0 / pow(sigma,2) # this is the beta used in Bishop Eqn. 6.59
# Test-input grid: N_test evenly spaced points on [-1, 1] with a zero prior mean.
N_test = 100
x_test = np.linspace(-1, 1, N_test)
mu_test = np.zeros(N_test)
#
#def true_mean_function( x ):
#    return np.sin( 2*pi*(x+1) )
#
#def add_noise( y, sigma ):
#    return y + sigma*np.random.randn(len(y))
#
#def generate_t( x, sigma ):
#    return add_noise( true_mean_function( x), sigma )
#
#y_test = true_mean_function( x_test )
#t_test = add_noise( y_test, sigma )
#pp.plot( x_test, y_test, 'b-', lw=2)
#pp.plot( x_test, t_test, 'go')


def gp_plot( x_test, y_test, mu_test, var_test, x_train, t_train, theta, beta ):
    """Plot the GP predictive distribution together with the training data.

    x_test:   test input locations
    y_test:   the true function values at x_test
    mu_test:  predictive mean at x_test
    var_test: predictive covariance matrix at x_test
    x_train:  training input locations
    t_train:  the training target values
    theta:    the kernel parameters (kept for interface compatibility)
    beta:     the (known) noise precision
    """
    # Split the total predictive stddev into a model part and a data-noise part
    # so the two uncertainty bands can be drawn separately.
    std_total = np.sqrt(np.diag(var_test))  # all uncertainty, model and target noise
    # Clamp at zero before the sqrt: floating-point rounding can make
    # std_total**2 - 1/beta slightly negative, which would yield NaNs.
    std_model = np.sqrt(np.maximum(std_total**2 - 1.0/beta, 0.0))  # model uncertainty only
    std_combo = std_model + np.sqrt(1.0/beta)  # sum of stddevs (note: not the same as full)

    pp.plot(x_test, y_test, 'b', lw=3)
    pp.plot(x_test, mu_test, 'k--', lw=2)
    pp.fill_between(x_test, mu_test + 2*std_combo, mu_test - 2*std_combo, color='k', alpha=0.25)
    pp.fill_between(x_test, mu_test + 2*std_model, mu_test - 2*std_model, color='r', alpha=0.25)
    pp.plot(x_train, t_train, 'ro', ms=10)
| 52 | + |
# Each row is one kernel hyperparameter setting [theta0, theta1, theta2, theta3].
THETAS = np.array([[1,4,0,0], [9,4,0,0], [1,64,0,0], [1,0.25,0,0], [1,4,10,0], [1,4,0,5]])

mean = np.zeros((N_test,1))  # NOTE(review): currently unused — kept for compatibility

# Draw N_test sample functions from the zero-mean GP prior using the last
# hyperparameter setting; computeK builds the kernel (Gram) matrix at x_test.
y_test = np.random.multivariate_normal(mu_test, computeK(x_test, THETAS[5]), N_test)

# Parenthesized print: identical output under Python 2, valid under Python 3.
print(y_test.shape)

# Plot the first six sampled functions against the test inputs.
for sample in y_test[:6]:
    pp.plot(x_test, sample)

pp.show()
| 72 | + |
| 73 | + |
| 74 | + |