Python theano.tensor 模块,and_() 实例源码

我们从Python开源项目中,提取了以下5个代码示例,用于说明如何使用theano.tensor.and_()

项目:Theano-Deep-learning    作者:GeekLiB    | 项目源码 | 文件源码
def test_and(self):
    """Check that ``and_`` with a constant 0 or 1 is canonicalized.

    ``x AND 0`` must fold to the constant 0 regardless of operand
    order; ``x AND 1`` must reduce to the identity on ``x`` (only
    asserted when the optimized output keeps x's dtype).
    """
    mode = theano.compile.get_default_mode().including('canonicalize')

    x = T.scalar('x', dtype='int8')

    # Exercise both numpy scalar constants and plain Python ints.
    for zero, one in ((numpy.int8(0), numpy.int8(1)), (0, 1)):
        # AND with zero collapses to the constant 0, on either side.
        for args in ((x, zero), (zero, x)):
            f = theano.function([x], T.and_(*args), mode=mode)
            self.assert_eqs_const(f, 0)

        # AND with one is the identity, provided the dtype survives.
        for args in ((x, one), (one, x)):
            f = theano.function([x], T.and_(*args), mode=mode)
            if f.outputs[0].variable.dtype == x.dtype:
                self.assert_identity(f)
项目:NNBuilder    作者:aeloyq    | 项目源码 | 文件源码
def and_(self, l, r):
    """Element-wise AND of *l* and *r*, delegated to theano.tensor."""
    result = T.and_(l, r)
    return result
项目:nature_methods_multicut_pipeline    作者:ilastik    | 项目源码 | 文件源码
def sylu(gain=10, spread=0.1):
    """Build a saturating-linear ("sylu") activation.

    The returned callable is piecewise linear in x:
      * +gain           for x >= 1/spread,
      * gain*spread*x   for -1/spread < x < 1/spread,
      * -gain           for x <= -1/spread.
    """
    knee = 1 / spread

    def activation(x):
        upper = switch(T.ge(x, knee), gain, 0)
        middle = switch(T.and_(T.gt(knee, x), T.gt(x, -knee)),
                        gain * spread * x, 0)
        lower = switch(T.le(x, -knee), -gain, 0)
        return upper + middle + lower

    return activation


# Exponential Linear Unit
项目:nature_methods_multicut_pipeline    作者:ilastik    | 项目源码 | 文件源码
def sylu(gain=10, spread=0.1):
    """Saturating linear unit factory.

    Returns a symbolic function that is linear (slope gain*spread)
    inside |x| < 1/spread and clamps to +/-gain outside that band.
    """
    threshold = 1 / spread
    return lambda x: (
        switch(T.ge(x, threshold), gain, 0)
        + switch(T.and_(T.gt(threshold, x), T.gt(x, -threshold)),
                 gain * spread * x, 0)
        + switch(T.le(x, -threshold), -gain, 0)
    )


# Exponential Linear Unit
项目:Theano-Deep-learning    作者:GeekLiB    | 项目源码 | 文件源码
def test_elemwise(self):
    """Elemwise ops over two switches with a shared condition must be
    optimized so that only a single Switch node survives."""
    # float Ops: binary arithmetic and comparisons
    mats = theano.tensor.matrices('cabxy')
    c, a, b, x, y = mats
    s1 = T.switch(c, a, b)
    s2 = T.switch(c, x, y)
    float_ops = (T.add, T.sub, T.mul, T.true_div, T.int_div, T.floor_div,
                 T.minimum, T.maximum, T.gt, T.lt, T.ge, T.le, T.eq, T.neq,
                 T.pow)
    for op in float_ops:
        graph = optimize(FunctionGraph(mats, [op(s1, s2)]))
        assert str(graph).count('Switch') == 1

    # integer Ops: logical and bitwise variants
    mats = theano.tensor.imatrices('cabxy')
    c, a, b, x, y = mats
    s1 = T.switch(c, a, b)
    s2 = T.switch(c, x, y)
    int_ops = (T.and_, T.or_, T.xor,
               T.bitwise_and, T.bitwise_or, T.bitwise_xor)
    for op in int_ops:
        graph = optimize(FunctionGraph(mats, [op(s1, s2)]))
        assert str(graph).count('Switch') == 1

    # add/mul accept more than two inputs: check with three switches
    u, v = theano.tensor.matrices('uv')
    s3 = T.switch(c, u, v)
    for op in (T.add, T.mul):
        graph = optimize(FunctionGraph(mats + [u, v], [op(s1, s2, s3)]))
        assert str(graph).count('Switch') == 1