Python theano.tensor module, sub() example source code

We extracted the following 7 code examples from open-source Python projects to illustrate how theano.tensor.sub() is used.

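As a quick orientation before the project examples: T.sub(a, b) builds the same symbolic graph as the expression a - b, i.e. elementwise subtraction with the usual broadcasting rules. A minimal sketch, not taken from any of the projects below:

import theano
import theano.tensor as T

a = T.matrix('a')
b = T.matrix('b')

diff = T.sub(a, b)                  # same graph as a - b
f = theano.function([a, b], diff)

print(f([[1., 2.]], [[0.5, 1.5]]))  # [[ 0.5  0.5]]
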
Project: SteinGAN    Author: DartML
def rbf_kernel(X0):
    # pairwise squared Euclidean distances: H[i, j] = ||x_i - x_j||^2
    XY = T.dot(X0, X0.transpose())
    x2 = T.reshape(T.sum(T.square(X0), axis=1), (X0.shape[0], 1))
    X2e = T.repeat(x2, X0.shape[0], axis=1)
    H = T.sub(T.add(X2e, X2e.transpose()), 2 * XY)

    V = H.flatten()

    # median pairwise squared distance (median heuristic for the bandwidth)
    h = T.switch(T.eq((V.shape[0] % 2), 0),
        # if even vector
        T.mean(T.sort(V)[ ((V.shape[0] // 2) - 1) : ((V.shape[0] // 2) + 1) ]),
        # if odd vector
        T.sort(V)[V.shape[0] // 2])

    h = T.sqrt(0.5 * h / T.log(X0.shape[0].astype('float32') + 1.0)) / 2.

    # RBF kernel matrix; for each point, the index of its nearest neighbour
    # (column 0 of the argsort is the point itself at distance 0, so take column 1)
    Kxy = T.exp(-H / h ** 2 / 2.0)
    neighbors = T.argsort(H, axis=1)[:, 1]

    return Kxy, neighbors, h
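In the snippet above, T.sub and T.add assemble the pairwise squared-distance matrix H, and the bandwidth h is derived from the median pairwise distance. A hypothetical way to compile and evaluate it (the symbolic input X and the random data are illustrative assumptions, not part of SteinGAN; it also assumes the snippet above was defined with import theano.tensor as T in scope):

import numpy as np
import theano
import theano.tensor as T

X = T.matrix('X')
Kxy, neighbors, h = rbf_kernel(X)                     # symbolic outputs of the snippet above
kernel_fn = theano.function([X], [Kxy, neighbors, h])

data = np.random.randn(5, 3).astype(theano.config.floatX)
K, nearest, bandwidth = kernel_fn(data)               # 5x5 kernel, nearest-neighbour indices, scalar bandwidth
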
Project: recnet    Author: joergfranke
def fit(self, weights, o_error, tpo):

        gradients = T.grad(o_error, weights)
        updates = []
        for c, v, w, g in zip(self.t_cache, self.t_velocity, weights, gradients):
            # momentum step: decayed velocity minus the scaled gradient
            new_velocity = T.sub(T.mul(tpo["momentum_rate"], v), T.mul(tpo["learn_rate"], g))
            # RMSprop-style running average of the squared gradient
            new_cache = T.add(T.mul(tpo["decay_rate"], c), T.mul(T.sub(1, tpo["decay_rate"]), T.sqr(g)))
            # apply the velocity, then the gradient scaled by the adaptive learning rate
            new_weights = T.sub(T.add(w, new_velocity), T.true_div(T.mul(g, tpo["learn_rate"]), T.sqrt(T.add(new_cache, 0.1**8))))
            updates.append((w, new_weights))
            updates.append((v, new_velocity))
            updates.append((c, new_cache))

        return updates


######                 Nesterov momentum
########################################
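The fit method in the recnet snippet above mixes classical momentum with an RMSprop-style squared-gradient cache, written with T.sub, T.add, T.mul and T.true_div instead of the arithmetic operators. A self-contained sketch of the same update rule for a single shared weight vector (the toy loss and the tpo values are assumptions for illustration, and 0.1**8 is written as 1e-8):

import numpy as np
import theano
import theano.tensor as T

tpo = {"momentum_rate": 0.9, "learn_rate": 1e-3, "decay_rate": 0.9}

w = theano.shared(np.zeros(3), name='w')   # weight vector
v = theano.shared(np.zeros(3), name='v')   # velocity
c = theano.shared(np.zeros(3), name='c')   # squared-gradient cache

x = T.dvector('x')
cost = T.sum(T.sqr(T.sub(w, x)))           # toy quadratic loss
g = T.grad(cost, w)

new_v = T.sub(T.mul(tpo["momentum_rate"], v), T.mul(tpo["learn_rate"], g))
new_c = T.add(T.mul(tpo["decay_rate"], c), T.mul(T.sub(1, tpo["decay_rate"]), T.sqr(g)))
new_w = T.sub(T.add(w, new_v), T.true_div(T.mul(g, tpo["learn_rate"]), T.sqrt(T.add(new_c, 1e-8))))

train = theano.function([x], cost, updates=[(w, new_w), (v, new_v), (c, new_c)])
train(np.ones(3))                          # one training step; the shared variables are updated in place
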
Project: Theano-Deep-learning    Author: GeekLiB
def c_code(self, node, name, inputs, outputs, sub):
        (x,) = inputs
        (z,) = outputs
        # 'sub' here is the c_code substitution dict of Theano's Op interface, unrelated to T.sub;
        # the generated C assigns the result of the op's <name>_timesn helper to the output.
        return "%(z)s = %(name)s_timesn(%(x)s);" % locals()
Project: Theano-Deep-learning    Author: GeekLiB
def test_elemwise(self):
        # float Ops
        mats = theano.tensor.matrices('cabxy')
        c, a, b, x, y = mats
        s1 = T.switch(c, a, b)
        s2 = T.switch(c, x, y)
        for op in (T.add, T.sub, T.mul, T.true_div, T.int_div, T.floor_div,
                   T.minimum, T.maximum, T.gt, T.lt, T.ge, T.le, T.eq, T.neq,
                   T.pow):
            g = optimize(FunctionGraph(mats, [op(s1, s2)]))
            assert str(g).count('Switch') == 1
        # integer Ops
        mats = theano.tensor.imatrices('cabxy')
        c, a, b, x, y = mats
        s1 = T.switch(c, a, b)
        s2 = T.switch(c, x, y)
        for op in (T.and_, T.or_, T.xor,
                   T.bitwise_and, T.bitwise_or, T.bitwise_xor):
            g = optimize(FunctionGraph(mats, [op(s1, s2)]))
            assert str(g).count('Switch') == 1
        # add/mul with more than two inputs
        u, v = theano.tensor.matrices('uv')
        s3 = T.switch(c, u, v)
        for op in (T.add, T.mul):
            g = optimize(FunctionGraph(mats + [u, v], [op(s1, s2, s3)]))
            assert str(g).count('Switch') == 1
Project: deepstacks    Author: guoxuesong
def __call__(self, inputs):
        if self.network is not None:
            #if 'image' not in inputs:
            #    print inputs
            network = inputs['image']
            if 'mean' in inputs:
                # subtract the mean image from the input elementwise before building the rest of the network
                network = lasagne.layers.ElemwiseMergeLayer((network, inputs['mean']), T.sub)
                inputs = inputs.copy()
                inputs.pop('mean')
            return deepstacks.lasagne.build_network(network, self.network, inputs)
        elif self.build_network is not None:
            return self.build_network(inputs)
Project: deepstacks    Author: guoxuesong
def sub_handler(layers, flags, stacks, this_model):
    # merge the incoming layers by elementwise subtraction
    return lasagne.layers.ElemwiseMergeLayer(layers, T.sub)
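Both deepstacks snippets pass T.sub as the merge function of lasagne.layers.ElemwiseMergeLayer, so the resulting layer outputs the elementwise difference of its inputs. A standalone sketch under assumed input shapes:

import theano
import theano.tensor as T
import lasagne

a = lasagne.layers.InputLayer((None, 4))
b = lasagne.layers.InputLayer((None, 4))
diff = lasagne.layers.ElemwiseMergeLayer((a, b), T.sub)   # elementwise a - b

out = lasagne.layers.get_output(diff)
f = theano.function([a.input_var, b.input_var], out)

print(f([[1., 2., 3., 4.]], [[0., 1., 1., 2.]]))          # [[ 1.  1.  2.  2.]]
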
Project: Theano-Deep-learning    Author: GeekLiB
def test_local_useless_inc_subtensor():
    x = tensor.matrix('x')
    y = tensor.matrix('y')
    mode = compile.get_default_mode().including("local_useless_inc_subtensor")
    for sub in [slice(None), slice(None, None, -1)]:
        o = tensor.set_subtensor(x[::, sub], y)
        f = theano.function([x, y], o, mode=mode)
        o_shape = tensor.set_subtensor(x[::, sub],
                                       tensor.specify_shape(y, x.shape))
        f_shape = theano.function([x, y], o_shape, mode=mode)

        # Test with shape info
        topo = f_shape.maker.fgraph.toposort()
        assert not any(isinstance(n.op, tensor.IncSubtensor) for n in topo)
        out = f_shape([[2, 3]], [[3, 4]])
        assert (out == numpy.asarray([[3, 4]])[::, sub]).all()

        # Test that without shape info, we don't apply the opt.
        topo = f.maker.fgraph.toposort()
        assert len(topo) == 1
        assert isinstance(topo[0].op, tensor.IncSubtensor)
        out = f([[2, 3]], [[3, 4]])
        assert (out == numpy.asarray([[3, 4]])[::, sub]).all()

        # Test that we don't remove shape error
        try:
            f([[2, 3]], [[3, 4], [4, 5]])
            assert False
        except (ValueError, AssertionError):
            pass

        # Test that we don't remove broadcastability
        out = f([[2, 3], [3, 4]], [[5, 6]])
        assert (out == numpy.asarray([[5, 6], [5, 6]])[::, sub]).all()

    # Test that we do not optimize other strides even when sub and y
    # have the same shapes
    sub = x[::, ::2]
    o_shape = tensor.set_subtensor(sub,
                                   tensor.specify_shape(y, sub.shape))
    f_shape = theano.function([x, y], o_shape)
    topo = f_shape.maker.fgraph.toposort()
    # theano.printing.debugprint(f_shape)
    assert any(isinstance(n.op, tensor.IncSubtensor) for n in topo)
    out = f_shape([[2, 3, 6, 7]], [[8, 9]])
    assert (out == numpy.asarray([[8, 3, 9, 7]])).all()
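
The test above exercises tensor.set_subtensor on column slices; as a reminder of what set_subtensor itself computes, here is a minimal sketch with arbitrary example values:

import numpy
import theano
import theano.tensor as T

x = T.matrix('x')
y = T.vector('y')
o = T.set_subtensor(x[0, :], y)     # symbolic copy of x with its first row replaced by y
f = theano.function([x, y], o)

print(f(numpy.zeros((2, 3)), numpy.arange(3.)))
# [[ 0.  1.  2.]
#  [ 0.  0.  0.]]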