tntn

Theano reimplementation of Neural Tensor Networks (NTN)
git clone https://esimon.eu/repos/tntn.git

commit 5b3138429c544454203f99b6ac938d3937e851ba
parent 6ccff23ecf2f6f601ea69d3fef7eec1821e74a07
Author: Étienne Simon <esimon@esimon.eu>
Date:   Thu, 10 Apr 2014 12:02:44 +0200

Fix bias broadcasting

Diffstat:
M model.py | 6 ++++--
1 file changed, 4 insertions(+), 2 deletions(-)

diff --git a/model.py b/model.py
@@ -1,4 +1,5 @@
 #!/usr/bin/env python2
+
 import numpy
 import scipy
 import theano
@@ -40,14 +41,15 @@ class Relation(object):
         self.W = ip(tag+".W", (n_in, n_in, n_hid), wbound)
         self.V = ip(tag+".V", (n_hid, n_in*2), vbound)
         self.u = ip(tag+".u", (n_hid,), ubound)
-        self.b = theano.shared(name=tag+".b", value=numpy.zeros(shape=(n_hid,1), dtype=theano.config.floatX), broadcastable=[False, True])
+        self.b = theano.shared(name=tag+".b", value=numpy.zeros(shape=(n_hid,), dtype=theano.config.floatX))
         self.params = [ self.W, self.V, self.u, self.b ]

     def score(self, inputl, inputr):
         """ Compute the score on given embeddings. """
         bilinear = ((inputr.transpose().reshape((inputr.shape[1], inputr.shape[0], 1))) * T.tensordot(inputl, self.W, axes=([0], [0]))).sum(1).transpose()
         linear = T.dot(self.V, T.concatenate([inputl, inputr]))
-        return T.dot(self.u, self.act(bilinear + linear + self.b))
+        bias = self.b.dimshuffle(0, 'x')
+        return T.dot(self.u, self.act(bilinear + linear + bias))

     def regularizer(self):
         """ Compute the squared L2-norm of the relation's parameters. """
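
The change replaces a (n_hid, 1) shared variable carrying an explicit broadcastable pattern with a plain (n_hid,) vector that gains a broadcastable column axis via dimshuffle at use time. A minimal standalone sketch of that broadcasting behaviour (hypothetical names and shapes, not code from this repository):

#!/usr/bin/env python2
# Sketch: adding a bias vector to an (n_hid, batch) pre-activation matrix.
# A bare (n_hid,) vector would not broadcast against the batch axis, so we
# dimshuffle it to (n_hid, 1) with a broadcastable second dimension.
import numpy
import theano
import theano.tensor as T

n_hid = 3
# Bias stored as a plain vector, as in the commit.
b = theano.shared(name="b", value=numpy.zeros(shape=(n_hid,), dtype=theano.config.floatX))

pre = T.matrix("pre")        # pre-activation, shape (n_hid, batch)
bias = b.dimshuffle(0, 'x')  # shape (n_hid, 1), column axis broadcastable
out = pre + bias             # bias broadcasts across the batch dimension

f = theano.function([pre], out)
print f(numpy.ones((n_hid, 2), dtype=theano.config.floatX))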