For those who are interested, here is one solution:

import numpy as np
import theano
from theano import tensor as T
# Seeded RNG so the sampling results are reproducible across runs.
theano_rng = T.shared_randomstreams.RandomStreams(1234)

# Toy replicated-softmax RBM parameters: 3 visible units and 2 hidden units.
# W is stored as (n_visible, n_hidden) after the transpose.
W_values = np.array([[1, 1, 1], [1, 1, 1]], dtype=theano.config.floatX).T
bvis_values = np.array([1, 1, 1], dtype=theano.config.floatX)    # visible bias
bhid_values = np.array([0.5, 0.5], dtype=theano.config.floatX)   # hidden bias
# Alternative parameter values kept from the original experiment:
#W_values = np.array([[.1,-.4],[5,.4],[-.5,.3]], dtype=theano.config.floatX)
#bvis_values = np.array([0.5,-0.6], dtype=theano.config.floatX)
#bhid_values = np.array([-2,1,2], dtype=theano.config.floatX)

# Shared variables so the parameters live on the Theano computation graph.
W = theano.shared(W_values)
vbias = theano.shared(bvis_values)
hbias = theano.shared(bhid_values)

def propup(vis, v_doc_len):
    """Propagate visible activations up to the hidden layer.

    The hidden bias is scaled per row by the document length
    ``v_doc_len`` (via the outer product), as in a replicated-softmax
    model.  Returns ``[pre_sigmoid_activation, sigmoid_activation]``.
    """
    pre_sigmoid = T.dot(vis, W) + T.outer(v_doc_len, hbias)
    return [pre_sigmoid, T.nnet.sigmoid(pre_sigmoid)]

def sample_h_given_v(v0_sample, v_doc_len):
    """Sample binary hidden units given a visible configuration.

    Returns ``[pre_sigmoid_activation, hidden_mean, hidden_sample]``
    where the sample is a Bernoulli draw with probability ``hidden_mean``.
    """
    pre_act, mean = propup(v0_sample, v_doc_len)
    sample = theano_rng.binomial(size=mean.shape, n=1, p=mean,
                                 dtype=theano.config.floatX)
    return [pre_act, mean, sample]

def propdown(hid):
    """Propagate hidden activations down to the visible layer.

    The visible layer is softmax (multinomial) rather than binary.
    Returns ``[pre_softmax_activation, softmax_activation]``.
    """
    pre_act = T.dot(hid, W.T) + vbias
    return [pre_act, T.nnet.softmax(pre_act)]

def sample_v_given_h(h0_sample, v_doc_len):
    """Sample visible units given a hidden configuration.

    Draws ``v_doc_len`` multinomial trials per row, so each
    reconstructed document keeps its original word count.  Returns
    ``[pre_softmax_activation, visible_mean, visible_sample]``.
    """
    pre_act, mean = propdown(h0_sample)
    sample = theano_rng.multinomial(size=None, n=v_doc_len, pvals=mean,
                                    dtype=theano.config.floatX)
    return [pre_act, mean, sample]

def gibbs_hvh(h0_sample, v_doc_len):
    """Perform one Gibbs step starting from the hidden state: h -> v -> h.

    Returns the six outputs of the two half-steps concatenated:
    ``[pre_softmax_v1, v1_mean, v1_sample,
       pre_sigmoid_h1, h1_mean, h1_sample]``.
    """
    v_outputs = sample_v_given_h(h0_sample, v_doc_len)
    h_outputs = sample_h_given_v(v_outputs[2], v_doc_len)
    return v_outputs + h_outputs


# Symbolic input: one document per row, per-word counts in the columns.
ipt = T.matrix()
ipt_rSum = ipt.sum(axis=1)  # total word count of each document

# Start the Gibbs chain from a hidden sample of the input.
pre_sigmoid_ph, ph_mean, ph_sample = sample_h_given_v(ipt, ipt_rSum)
chain_start = ph_sample

# Run two full h -> v -> h Gibbs steps.  Only the hidden sample (the
# last of the six outputs) is fed back into the next step; the other
# five outputs are recorded but not recurrent (outputs_info=None).
results, updates = theano.scan(fn=gibbs_hvh,
                               outputs_info=[None, None, None, None,
                                             None, chain_start],
                               non_sequences=ipt_rSum,
                               n_steps=2)

hgv = theano.function([ipt], outputs=results, updates=updates)

# Toy data: 4 documents over a 3-word vocabulary.
# (The original wrapped this in a redundant second np.array call.)
b = np.array([[1, 6, 1], [1, 3, 2], [5, 2, 1], [5, 1, 2]],
             dtype=theano.config.floatX)
output = hgv(b)
[out_1, out_2, out_3, out_4, out_5, out_6] = output

-- 

--- 
You received this message because you are subscribed to the Google Groups 
"theano-users" group.
To unsubscribe from this group and stop receiving emails from it, send an email 
to theano-users+unsubscribe@googlegroups.com.
For more options, visit https://groups.google.com/d/optout.

Reply via email to