Python keras module: keras.activations code examples

The following 12 code examples, extracted from open-source Python projects, illustrate how to use the keras.activations module.
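
All of the snippets below come from Theano-era Keras test suites and rely on the same module-level imports plus two small helpers, get_standard_values() and list_assert_equal(), which the excerpts do not show. The following is a minimal sketch of those missing pieces so the tests can be run stand-alone; the concrete test values and the rounding tolerance are assumptions, not code copied from the listed projects.

import math

import numpy
import theano
import theano.tensor as T


def get_standard_values():
    # A small set of non-negative floats shared by the activation tests
    # (assumed values).
    return [0, 0.1, 0.5, 0.9, 1.0]


def list_assert_equal(a, b, round_to=7):
    # Element-wise comparison of two number sequences, rounded to avoid
    # floating-point noise (assumed implementation).
    for x, y in zip(a, b):
        assert round(x, round_to) == round(y, round_to)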

Project: keras-recommendation    Author: sonyisme    | project source | file source
def test_softmax():

    from keras.activations import softmax as s

    # Test using a reference implementation of softmax
    def softmax(values):
        # Shift by the max before exponentiating for numerical stability.
        m = max(values)
        values = numpy.array(values)
        e = numpy.exp(values - m)
        dist = list(e / numpy.sum(e))

        return dist

    x = T.vector()
    exp = s(x)
    f = theano.function([x], exp)
    test_values = get_standard_values()

    result = f(test_values)
    expected = softmax(test_values)

    print(str(result))
    print(str(expected))

    list_assert_equal(result, expected)
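
The reference implementation above subtracts the maximum entry before exponentiating. This is the standard numerical-stability trick and does not change the result, because the shift cancels in the ratio:

    softmax(x)_i = exp(x_i - m) / sum_j exp(x_j - m)
                 = exp(x_i) / sum_j exp(x_j),    where m = max(x)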
Project: keras-recommendation    Author: sonyisme    | project source | file source
def test_relu():
    '''
    Relu implementation doesn't depend on the value being
    a theano variable. Testing ints, floats and theano tensors.
    '''

    from keras.activations import relu as r

    assert r(5) == 5
    assert r(-5) == 0
    assert r(-0.1) == 0
    assert r(0.1) == 0.1

    x = T.vector()
    exp = r(x)
    f = theano.function([x], exp)

    test_values = get_standard_values()
    result = f(test_values)

    list_assert_equal(result, test_values) # because no negatives in test values
Project: keras-recommendation    Author: sonyisme    | project source | file source
def test_tanh():

    from keras.activations import tanh as t
    test_values = get_standard_values()

    x = T.vector()
    exp = t(x)
    f = theano.function([x], exp)

    result = f(test_values)
    expected = [math.tanh(v) for v in test_values]

    print(result)
    print(expected)

    list_assert_equal(result, expected)
Project: deep-coref    Author: clarkkev    | project source | file source
def test_softmax():

    from keras.activations import softmax as s

    # Test using a reference implementation of softmax
    def softmax(values):
        # Shift by the max before exponentiating for numerical stability.
        m = max(values)
        values = numpy.array(values)
        e = numpy.exp(values - m)
        dist = list(e / numpy.sum(e))

        return dist

    x = T.vector()
    exp = s(x)
    f = theano.function([x], exp)
    test_values = get_standard_values()

    result = f(test_values)
    expected = softmax(test_values)

    print(str(result))
    print(str(expected))

    list_assert_equal(result, expected)
Project: deep-coref    Author: clarkkev    | project source | file source
def test_relu():
    '''
    Relu implementation doesn't depend on the value being
    a theano variable. Testing ints, floats and theano tensors.
    '''

    from keras.activations import relu as r

    assert r(5) == 5
    assert r(-5) == 0
    assert r(-0.1) == 0
    assert r(0.1) == 0.1

    x = T.vector()
    exp = r(x)
    f = theano.function([x], exp)

    test_values = get_standard_values()
    result = f(test_values)

    list_assert_equal(result, test_values)  # because no negatives in test values
Project: RecommendationSystem    Author: TURuibo    | project source | file source
def test_softmax():

    from keras.activations import softmax as s

    # Test using a reference implementation of softmax
    def softmax(values):
        # Shift by the max before exponentiating for numerical stability.
        m = max(values)
        values = numpy.array(values)
        e = numpy.exp(values - m)
        dist = list(e / numpy.sum(e))

        return dist

    x = T.vector()
    exp = s(x)
    f = theano.function([x], exp)
    test_values = get_standard_values()

    result = f(test_values)
    expected = softmax(test_values)

    print(str(result))
    print(str(expected))

    list_assert_equal(result, expected)
Project: RecommendationSystem    Author: TURuibo    | project source | file source
def test_relu():
    '''
    Relu implementation doesn't depend on the value being
    a theano variable. Testing ints, floats and theano tensors.
    '''

    from keras.activations import relu as r

    assert r(5) == 5
    assert r(-5) == 0
    assert r(-0.1) == 0
    assert r(0.1) == 0.1

    x = T.vector()
    exp = r(x)
    f = theano.function([x], exp)

    test_values = get_standard_values()
    result = f(test_values)

    list_assert_equal(result, test_values) # because no negatives in test values
Project: RecommendationSystem    Author: TURuibo    | project source | file source
def test_tanh():

    from keras.activations import tanh as t
    test_values = get_standard_values()

    x = T.vector()
    exp = t(x)
    f = theano.function([x], exp)

    result = f(test_values)
    expected = [math.tanh(v) for v in test_values]

    print(result)
    print(expected)

    list_assert_equal(result, expected)
Project: keras-recommendation    Author: sonyisme    | project source | file source
def test_linear():
    '''
    This function does no input validation; it just returns whatever
    was passed in.
    '''

    from keras.activations import linear as l

    xs = [1, 5, True, None, 'foo']

    for x in xs:
        assert x == l(x)
Project: deep-coref    Author: clarkkev    | project source | file source
def test_tanh():
    from keras.activations import tanh as t
    test_values = get_standard_values()

    x = T.vector()
    exp = t(x)
    f = theano.function([x], exp)

    result = f(test_values)
    expected = [math.tanh(v) for v in test_values]

    print(result)
    print(expected)

    list_assert_equal(result, expected)
Project: deep-coref    Author: clarkkev    | project source | file source
def test_linear():
    '''
    This function does no input validation; it just returns whatever
    was passed in.
    '''

    from keras.activations import linear as l

    xs = [1, 5, True, None, 'foo']

    for x in xs:
        assert x == l(x)
Project: RecommendationSystem    Author: TURuibo    | project source | file source
def test_linear():
    '''
    This function does no input validation; it just returns whatever
    was passed in.
    '''

    from keras.activations import linear as l

    xs = [1, 5, True, None, 'foo']

    for x in xs:
        assert x == l(x)
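
Beyond importing the functions directly, as the tests above do, Keras layers resolve an activation given as a string ('relu', 'softmax', ...) to the corresponding function through keras.activations.get. A minimal sketch of that lookup is shown below; the exact return type of get varies slightly across Keras versions, so treat it as illustrative rather than definitive.

from keras import activations

# Resolve activation functions by name, the way layers do when an
# activation is passed as a string.
relu_fn = activations.get('relu')
tanh_fn = activations.get('tanh')
softmax_fn = activations.get('softmax')
linear_fn = activations.get('linear')

# These resolve to the same callables the tests import directly,
# e.g. keras.activations.relu and keras.activations.softmax.
print(relu_fn, tanh_fn, softmax_fn, linear_fn)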