Python keras.layers.advanced_activations module: ThresholdedReLU() code examples

The following 9 code examples, extracted from open-source Python projects, illustrate how to use keras.layers.advanced_activations.ThresholdedReLU().
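
Before the extracted examples, here is a minimal standalone sketch of what the layer computes (illustrative only, not taken from any project below): ThresholdedReLU(theta) outputs x where x > theta and 0 elsewhere; the Keras default is theta=1.0.

import numpy as np
from keras.models import Sequential
from keras.layers.advanced_activations import ThresholdedReLU

model = Sequential()
model.add(ThresholdedReLU(theta=0.5, input_shape=(4,)))

x = np.array([[0.2, 0.5, 0.6, 1.0]])
print(model.predict(x))  # [[0. 0. 0.6 1.]] -- entries <= theta are zeroed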

Project: keras    Author: GeekLiB    | Project source | File source
def test_thresholded_relu():
    from keras.utils.test_utils import layer_test  # test helper shipped with Keras (assumed location in this fork)
    from keras.layers.advanced_activations import ThresholdedReLU
    layer_test(ThresholdedReLU, kwargs={'theta': 0.5},
               input_shape=(2, 3, 4))
Project: Fabrik    Author: Cloud-CV    | Project source | File source
def test_keras_export(self):
    # Load the test fixture describing the network to export
    tests = open(os.path.join(settings.BASE_DIR, 'tests', 'unit', 'keras_app',
                              'keras_export_test.json'), 'r')
    response = json.load(tests)
    tests.close()
    net = yaml.safe_load(json.dumps(response['net']))
    # Wire an Input layer into a ThresholdedReLU layer
    net = {'l0': net['Input'], 'l1': net['ThresholdedReLU']}
    net['l0']['connection']['output'].append('l1')
    inp = data(net['l0'], '', 'l0')['l0']
    net = activation(net['l1'], [inp], 'l1')
    model = Model(inp, net['l1'])
    self.assertEqual(model.layers[1].__class__.__name__, 'ThresholdedReLU')
Project: Fabrik    Author: Cloud-CV    | Project source | File source
def activation(layer, layer_in, layerId):
    # Map a Fabrik layer spec onto the matching Keras activation layer or op
    out = {}
    if (layer['info']['type'] == 'ReLU'):
        if (layer['params']['negative_slope'] != 0):
            out[layerId] = LeakyReLU(alpha=layer['params']['negative_slope'])(*layer_in)
        else:
            out[layerId] = Activation('relu')(*layer_in)
    elif (layer['info']['type'] == 'PReLU'):
        out[layerId] = PReLU()(*layer_in)
    elif (layer['info']['type'] == 'ELU'):
        out[layerId] = ELU(alpha=layer['params']['alpha'])(*layer_in)
    elif (layer['info']['type'] == 'ThresholdedReLU'):
        out[layerId] = ThresholdedReLU(theta=layer['params']['theta'])(*layer_in)
    elif (layer['info']['type'] == 'Sigmoid'):
        out[layerId] = Activation('sigmoid')(*layer_in)
    elif (layer['info']['type'] == 'TanH'):
        out[layerId] = Activation('tanh')(*layer_in)
    elif (layer['info']['type'] == 'Softmax'):
        out[layerId] = Activation('softmax')(*layer_in)
    elif (layer['info']['type'] == 'SELU'):
        out[layerId] = Activation('selu')(*layer_in)
    elif (layer['info']['type'] == 'Softplus'):
        out[layerId] = Activation('softplus')(*layer_in)
    elif (layer['info']['type'] == 'Softsign'):
        out[layerId] = Activation('softsign')(*layer_in)
    elif (layer['info']['type'] == 'HardSigmoid'):
        out[layerId] = Activation('hard_sigmoid')(*layer_in)
    return out
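
For illustration, a hypothetical call into this dispatcher, reusing the 'info'/'params' spec shape from the test above:

from keras.layers import Input

inp = Input(shape=(4,))
spec = {'info': {'type': 'ThresholdedReLU'}, 'params': {'theta': 0.5}}
out = activation(spec, [inp], 'l1')
# out['l1'] is the thresholded output tensor, e.g. for Model(inp, out['l1'])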
Project: coremltools    Author: apple    | Project source | File source
def test_tiny_conv_thresholded_relu_random(self):
    np.random.seed(1988)

    # Define a model (Keras 2 API: Conv2D with filters/kernel_size/padding)
    from keras.layers.advanced_activations import ThresholdedReLU
    model = Sequential()
    model.add(Conv2D(input_shape=(10, 10, 3),
                     filters=3, kernel_size=(5, 5), padding='same'))
    model.add(ThresholdedReLU(theta=0.8))

    model.set_weights([np.random.rand(*w.shape) for w in model.get_weights()])

    # Get the Core ML model
    self._test_keras_model(model)
Project: coremltools    Author: apple    | Project source | File source
def test_tiny_conv_thresholded_relu_random(self):
    np.random.seed(1988)

    # Define a model (legacy Keras 1 API: Convolution2D with nb_filter/nb_row/nb_col/border_mode)
    from keras.layers.advanced_activations import ThresholdedReLU
    model = Sequential()
    model.add(Convolution2D(input_shape=(10, 10, 3),
                            nb_filter=3, nb_row=5, nb_col=5, border_mode='same'))
    model.add(ThresholdedReLU(theta=0.8))

    model.set_weights([np.random.rand(*w.shape) for w in model.get_weights()])

    # Get the Core ML model
    self._test_keras_model(model)
Project: auto_ml    Author: ClimbsRocks    | Project source | File source
def get_activation_layer(activation):
    if activation == 'LeakyReLU':
        return LeakyReLU()
    if activation == 'PReLU':
        return PReLU()
    if activation == 'ELU':
        return ELU()
    if activation == 'ThresholdedReLU':
        return ThresholdedReLU()

    # Any other string is passed straight through to Keras, e.g. Activation('relu')
    return Activation(activation)

# TODO: same for optimizers, including clipnorm
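
A hypothetical call, for illustration:

layer = get_activation_layer('ThresholdedReLU')  # ThresholdedReLU() with the Keras default theta=1.0
layer = get_activation_layer('softplus')         # falls through to Activation('softplus')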
Project: keras-customized    Author: ambrite    | Project source | File source
def test_thresholded_relu():
    from keras.utils.test_utils import layer_test  # test helper shipped with Keras (assumed location in this fork)
    from keras.layers.advanced_activations import ThresholdedReLU
    layer_test(ThresholdedReLU, kwargs={'theta': 0.5},
               input_shape=(2, 3, 4))
Project: keras    Author: NVIDIA    | Project source | File source
def test_thresholded_relu():
    from keras.utils.test_utils import layer_test  # test helper shipped with Keras (assumed location in this fork)
    from keras.layers.advanced_activations import ThresholdedReLU
    layer_test(ThresholdedReLU, kwargs={'theta': 0.5},
               input_shape=(2, 3, 4))
Project: Fabrik    Author: Cloud-CV    | Project source | File source
def test_keras_import(self):
    # softmax
    model = Sequential()
    model.add(Activation('softmax', input_shape=(15,)))
    model.build()
    self.keras_type_test(model, 0, 'Softmax')
    # relu
    model = Sequential()
    model.add(Activation('relu', input_shape=(15,)))
    model.build()
    self.keras_type_test(model, 0, 'ReLU')
    # tanh
    model = Sequential()
    model.add(Activation('tanh', input_shape=(15,)))
    model.build()
    self.keras_type_test(model, 0, 'TanH')
    # sigmoid
    model = Sequential()
    model.add(Activation('sigmoid', input_shape=(15,)))
    model.build()
    self.keras_type_test(model, 0, 'Sigmoid')
    # selu
    model = Sequential()
    model.add(Activation('selu', input_shape=(15,)))
    model.build()
    self.keras_type_test(model, 0, 'SELU')
    # softplus
    model = Sequential()
    model.add(Activation('softplus', input_shape=(15,)))
    model.build()
    self.keras_type_test(model, 0, 'Softplus')
    # softsign
    model = Sequential()
    model.add(Activation('softsign', input_shape=(15,)))
    model.build()
    self.keras_type_test(model, 0, 'Softsign')
    # hard_sigmoid
    model = Sequential()
    model.add(Activation('hard_sigmoid', input_shape=(15,)))
    model.build()
    self.keras_type_test(model, 0, 'HardSigmoid')
    # LeakyReLU (imported into Fabrik as a ReLU with a negative slope)
    model = Sequential()
    model.add(LeakyReLU(alpha=1, input_shape=(15,)))
    model.build()
    self.keras_type_test(model, 0, 'ReLU')
    # PReLU
    model = Sequential()
    model.add(PReLU(input_shape=(15,)))
    model.build()
    self.keras_type_test(model, 0, 'PReLU')
    # ELU
    model = Sequential()
    model.add(ELU(alpha=1, input_shape=(15,)))
    model.build()
    self.keras_type_test(model, 0, 'ELU')
    # ThresholdedReLU
    model = Sequential()
    model.add(ThresholdedReLU(theta=1, input_shape=(15,)))
    model.build()
    self.keras_type_test(model, 0, 'ThresholdedReLU')