Python keras.initializations module: get() code examples

The following 50 code examples, extracted from open-source Python projects, illustrate how to use keras.initializations.get().
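keras.initializations is the Keras 1.x module (renamed to keras.initializers in Keras 2). Before the examples, here is a minimal sketch of the shared pattern, assuming Keras 1.x: get() resolves a string name (or passes a callable through) to an initialization function, which a custom layer stores in __init__ and later calls with a weight shape in build(). The layer name and shapes below are illustrative and not taken from any of the projects listed here.

from keras import backend as K
from keras import initializations
from keras.engine.topology import Layer


class TinyDense(Layer):
    """Toy layer showing how initializations.get() is typically used."""

    def __init__(self, output_dim, init='glorot_uniform', **kwargs):
        # Resolve the identifier to a function: f(shape, name=None) -> variable.
        self.init = initializations.get(init)
        self.output_dim = output_dim
        super(TinyDense, self).__init__(**kwargs)

    def build(self, input_shape):
        # The stored initializer is only called here, with the weight shape.
        self.W = self.init((input_shape[-1], self.output_dim),
                           name='{}_W'.format(self.name))
        self.trainable_weights = [self.W]

    def call(self, x, mask=None):
        return K.dot(x, self.W)

    def get_output_shape_for(self, input_shape):
        return (input_shape[0], self.output_dim)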

Project: keras-utilities    Author: cbaziotis
def __init__(self,
                 W_regularizer=None, u_regularizer=None, b_regularizer=None,
                 W_constraint=None, u_constraint=None, b_constraint=None,
                 bias=True, **kwargs):

        self.supports_masking = True
        self.init = initializations.get('glorot_uniform')

        self.W_regularizer = regularizers.get(W_regularizer)
        self.u_regularizer = regularizers.get(u_regularizer)
        self.b_regularizer = regularizers.get(b_regularizer)

        self.W_constraint = constraints.get(W_constraint)
        self.u_constraint = constraints.get(u_constraint)
        self.b_constraint = constraints.get(b_constraint)

        self.bias = bias
        super(AttentionWithContext, self).__init__(**kwargs)
Project: State-Frequency-Memory-stock-prediction    Author: z331565360
def __init__(self, output_dim, freq_dim, hidden_dim,
                 init='glorot_uniform', inner_init='orthogonal',
                 forget_bias_init='one', activation='tanh',
                 inner_activation='hard_sigmoid',
                 W_regularizer=None, U_regularizer=None, b_regularizer=None,
                 dropout_W=0., dropout_U=0., **kwargs):
        self.output_dim = output_dim
        self.freq_dim = freq_dim
        self.hidden_dim = hidden_dim
        self.init = initializations.get(init)
        self.inner_init = initializations.get(inner_init)
        self.forget_bias_init = initializations.get(forget_bias_init)
        self.activation = activations.get(activation)
        self.inner_activation = activations.get(inner_activation)
        self.W_regularizer = regularizers.get(W_regularizer)
        self.U_regularizer = regularizers.get(U_regularizer)
        self.b_regularizer = regularizers.get(b_regularizer)
        self.dropout_W, self.dropout_U = dropout_W, dropout_U

        if self.dropout_W or self.dropout_U:
            self.uses_learning_phase = True
        super(ITOSFM, self).__init__(**kwargs)
Project: State-Frequency-Memory-stock-prediction    Author: z331565360
def __init__(self, output_dim, freq_dim, hidden_dim,
                 init='glorot_uniform', inner_init='orthogonal',
                 forget_bias_init='one', activation='tanh',
                 inner_activation='hard_sigmoid',
                 W_regularizer=None, U_regularizer=None, b_regularizer=None,
                 dropout_W=0., dropout_U=0., **kwargs):
        self.output_dim = output_dim
        self.freq_dim = freq_dim
        self.hidden_dim = hidden_dim
        self.init = initializations.get(init)
        self.inner_init = initializations.get(inner_init)
        self.forget_bias_init = initializations.get(forget_bias_init)
        self.activation = activations.get(activation)
        self.inner_activation = activations.get(inner_activation)
        self.W_regularizer = regularizers.get(W_regularizer)
        self.U_regularizer = regularizers.get(U_regularizer)
        self.b_regularizer = regularizers.get(b_regularizer)
        self.dropout_W, self.dropout_U = dropout_W, dropout_U

        if self.dropout_W or self.dropout_U:
            self.uses_learning_phase = True
        super(ITOSFM, self).__init__(**kwargs)
Project: emnlp2017-bilstm-cnn-crf    Author: UKPLab
def __init__(self, init='glorot_uniform',
                 U_regularizer=None, b_start_regularizer=None, b_end_regularizer=None,
                 U_constraint=None, b_start_constraint=None, b_end_constraint=None,
                 weights=None,
                 **kwargs):
        self.supports_masking = True
        self.uses_learning_phase = True
        self.input_spec = [InputSpec(ndim=3)]
        self.init = initializations.get(init)

        self.U_regularizer = regularizers.get(U_regularizer)
        self.b_start_regularizer = regularizers.get(b_start_regularizer)
        self.b_end_regularizer = regularizers.get(b_end_regularizer)
        self.U_constraint = constraints.get(U_constraint)
        self.b_start_constraint = constraints.get(b_start_constraint)
        self.b_end_constraint = constraints.get(b_end_constraint)

        self.initial_weights = weights

        super(ChainCRF, self).__init__(**kwargs)
Project: NTM-Keras    Author: SigmaQuan
def __init__(self, output_dim, memory_dim=128, memory_size=20,
                 controller_output_dim=100, location_shift_range=1,
                 num_read_head=1, num_write_head=1,
                 init='glorot_uniform', inner_init='orthogonal',
                 forget_bias_init='one', activation='tanh',
                 inner_activation='hard_sigmoid',
                 W_regularizer=None, U_regularizer=None, R_regularizer=None,
                 b_regularizer=None, W_y_regularizer=None,
                 W_xi_regularizer=None, W_r_regularizer=None,
                 dropout_W=0., dropout_U=0., **kwargs):
        self.output_dim = output_dim
        self.init = initializations.get(init)
        self.inner_init = initializations.get(inner_init)
        self.forget_bias_init = initializations.get(forget_bias_init)
        self.activation = activations.get(activation)
        self.inner_activation = activations.get(inner_activation)
        self.W_regularizer = regularizers.get(W_regularizer)
        self.U_regularizer = regularizers.get(U_regularizer)
        self.b_regularizer = regularizers.get(b_regularizer)
        self.dropout_W, self.dropout_U = dropout_W, dropout_U

        if self.dropout_W or self.dropout_U:
            self.uses_learning_phase = True
        super(NTM, self).__init__(**kwargs)
Project: ppap    Author: unique-horn
def __init__(self,
                 input_channels,
                 output_shape,
                 num_filters,
                 layer_sizes,
                 init="glorot_uniform"):
        """
        Parameters
        ----------
        output_shape : list_like
            Size of the generated matrix (x, y)
        layer_sizes : array_like
            List of nodes in hidden layers
        init : str
            Keras initializer to use for weights
        """
        self.input_channels = input_channels
        self.num_filters = num_filters
        self.output_shape = output_shape
        self.layer_sizes = layer_sizes
        self.init = initializations.get(init)
        self.bias_init = initializations.get("zero")

        self.setup_weights()
        self.setup_output()
Project: ppap    Author: unique-horn
def __init__(self,
                 output_shape,
                 layer_sizes,
                 scale,
                 init="glorot_uniform"):
        """
        Parameters
        ----------
        output_shape : list_like
            Size of the generated matrix (x, y)
        layer_sizes : array_like
            List of nodes in hidden layers
        init : str
            Keras initializer to use for weights
        """
        self.output_shape = output_shape
        self.layer_sizes = layer_sizes
        self.init = initializations.get(init)
        self.bias_init = initializations.get("zero")
        self.scale = scale

        self.setup_weights()
        self.setup_output()
Project: ppap    Author: unique-horn
def __init__(self, filter_size, input_shape, filters_in, batch_size):
        """
        Parameters:
        -----------
        filter_size : int
            Size of the filter in 1 dimension (total = filter_size ** 2)
        input_shape: list_like
            Size of input image this filter is working on. This is used for
            generating separate filters for each pixel position of the image
        filters_in : int
            Number of channels in input
        batch_size : int
            Batch size
        """

        self.filter_size = filter_size
        self.input_shape = input_shape
        self.filters_in = filters_in
        self.batch_size = batch_size
        self.init = initializations.get("glorot_uniform")
        self.b_init = initializations.get("zero")

        self.setup_weights()
Project: ppap    Author: unique-horn
def __init__(self, filter_size, input_shape, filters_in, batch_size):
        """
        Parameters:
        -----------
        filter_size : int
            Size of the filter in 1 dimension (total = filter_size ** 2)
        input_shape: list_like
            Size of input image this filter is working on. This is used for
            generating separate filters for each pixel position of the image
        filters_in : int
            Number of channels in input
        batch_size : int
            Batch size
        """

        self.filter_size = filter_size
        self.input_shape = input_shape
        self.filters_in = filters_in
        self.batch_size = batch_size
        self.init = initializations.get("glorot_uniform")
        self.b_init = initializations.get("zero")

        self.setup_weights()
Project: ppap    Author: unique-horn
def __init__(self, filter_size, input_shape, filters_in, batch_size):
        """
        Parameters:
        -----------
        filter_size : int
            Size of the filter in 1 dimension (total = filter_size ** 2)
        input_shape: list_like
            Size of input image this filter is working on. This is used for
            generating separate filters for each pixel position of the image
        filters_in : int
            Number of channels in input
        batch_size : int
            Batch size
        """

        self.filter_size = filter_size
        self.input_shape = input_shape
        self.filters_in = filters_in
        self.batch_size = batch_size
        self.init = initializations.get("glorot_uniform")
        self.b_init = initializations.get("zero")

        self.setup_weights()
Project: Keras_note    Author: LibCorner
def __init__(self,output_dim,mem_vec_dim,init='glorot_uniform', activation='linear', weights=None,
                 activity_regularizer=None,input_dim=None, **kwargs):
        '''
        Params:
            output_dim: dimension of the layer output
            mem_vec_dim: dimension of the query (memory) vector
        '''
        self.init = initializations.get(init)
        self.activation = activations.get(activation)
        self.output_dim = output_dim
        self.input_dim = input_dim
        self.mem_vector_dim=mem_vec_dim

        self.activity_regularizer = regularizers.get(activity_regularizer)


        self.initial_weights = weights

        if self.input_dim:
            kwargs['input_shape'] = (self.input_dim,)
        super(MemoryNet,self).__init__(**kwargs)
Project: deep-models    Author: LaurentMazare
def __init__(self, output_dim, L,
             init='glorot_uniform', inner_init='orthogonal',
             activation='tanh', inner_activation='hard_sigmoid',
             W_regularizer=None, U_regularizer=None, b_regularizer=None,
             dropout_W=0., dropout_U=0., **kwargs):
    self.output_dim = output_dim
    self.init = initializations.get(init)
    self.inner_init = initializations.get(inner_init)
    self.activation = activations.get(activation)
    self.inner_activation = activations.get(inner_activation)
    self.W_regularizer = regularizers.get(W_regularizer)
    self.U_regularizer = regularizers.get(U_regularizer)
    self.b_regularizer = regularizers.get(b_regularizer)
    self.dropout_W, self.dropout_U = dropout_W, dropout_U
    self.L = L

    if self.dropout_W or self.dropout_U:
        self.uses_learning_phase = True
    super(RHN, self).__init__(**kwargs)
Project: NN_sentiment    Author: hx364
def __init__(self, input_dim, output_dim,
                 init='uniform', input_length=None,
                 W_regularizer=None, activity_regularizer=None,
                 W_constraint=None,
                 mask_zero=False,
                 weights=None, **kwargs):
        self.input_dim = input_dim
        self.output_dim = output_dim
        self.init = initializations.get(init)
        self.input_length = input_length
        self.mask_zero = mask_zero

        self.W_constraint = constraints.get(W_constraint)
        self.constraints = [self.W_constraint]

        self.W_regularizer = regularizers.get(W_regularizer)
        self.activity_regularizer = regularizers.get(activity_regularizer)

        self.initial_weights = weights
        kwargs['input_shape'] = (self.input_dim,)
        super(Embedding2D, self).__init__(**kwargs)
Project: NN_sentiment    Author: hx364
def __init__(self, input_dim, output_dim,
                 init='uniform', input_length=None,
                 W_regularizer=None, activity_regularizer=None,
                 W_constraint=None,
                 mask_zero=False,
                 weights=None, **kwargs):
        self.input_dim = input_dim
        self.output_dim = output_dim
        self.init = initializations.get(init)
        self.input_length = input_length
        self.mask_zero = mask_zero

        self.W_constraint = constraints.get(W_constraint)
        self.constraints = [self.W_constraint]

        self.W_regularizer = regularizers.get(W_regularizer)
        self.activity_regularizer = regularizers.get(activity_regularizer)

        self.initial_weights = weights
        kwargs['input_shape'] = (self.input_dim,)
        super(Embedding, self).__init__(**kwargs)
Project: mlnet    Author: marcellacornia
def __init__(self, downsampling_factor=10, init='glorot_uniform', activation='linear',
                 weights=None, W_regularizer=None, activity_regularizer=None,
                 W_constraint=None, input_dim=None, **kwargs):

        self.downsampling_factor = downsampling_factor
        self.init = initializations.get(init)
        self.activation = activations.get(activation)

        self.W_regularizer = regularizers.get(W_regularizer)
        self.activity_regularizer = regularizers.get(activity_regularizer)

        self.W_constraint = constraints.get(W_constraint)

        self.initial_weights = weights

        self.input_dim = input_dim
        if self.input_dim:
            kwargs['input_shape'] = (self.input_dim,)

        self.input_spec = [InputSpec(ndim=4)]
        super(EltWiseProduct, self).__init__(**kwargs)
Project: huffmax    Author: farizrahman4u
def __init__(self, nb_classes, frequency_table=None, mode=0, init='glorot_uniform', weights=None, W_regularizer=None, b_regularizer=None, activity_regularizer=None,
                 W_constraint=None, b_constraint=None,
                 bias=True, verbose=False, **kwargs):
        '''
        # Arguments:
        nb_classes: Number of classes.
        frequency_table: list. Frequency of each class. More frequent classes will have shorter Huffman codes.
        mode: integer. One of [0, 1]
        verbose: boolean. Set to True to see the progress of building the Huffman tree.
        '''
        self.nb_classes = nb_classes
        if frequency_table is None:
            frequency_table = [1] * nb_classes
        self.frequency_table = frequency_table
        self.mode = mode
        self.init = initializations.get(init)
        self.W_regularizer = regularizers.get(W_regularizer)
        self.b_regularizer = regularizers.get(b_regularizer)
        self.activity_regularizer = regularizers.get(activity_regularizer)
        self.W_constraint = constraints.get(W_constraint)
        self.b_constraint = constraints.get(b_constraint)
        self.bias = bias
        self.initial_weights = weights
        self.verbose = verbose
        super(Huffmax, self).__init__(**kwargs)
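Based only on the constructor signature and docstring above, a hypothetical instantiation could look like the following sketch; the class count and frequency values are invented for illustration.

# Hypothetical usage based on the signature above; values are made up.
nb_classes = 10000
frequency_table = [50, 30, 20] + [1] * (nb_classes - 3)  # more frequent classes get shorter codes

huffmax_layer = Huffmax(nb_classes=nb_classes,
                        frequency_table=frequency_table,
                        mode=0,        # one of [0, 1]
                        verbose=True)  # print progress while building the Huffman tree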
Project: New_Layers-Keras-Tensorflow    Author: WeidiXie
def __init__(self, output_dim,
                 init='glorot_uniform', inner_init='orthogonal',
                 activation='tanh', beta_init='zero', gamma_init='one',
                 W_regularizer=None, U_regularizer=None, b_regularizer=None,
                 gamma_regularizer=None, beta_regularizer=None,
                 dropout_W=0., dropout_U=0., **kwargs):
        self.output_dim = output_dim
        self.activation = activations.get(activation)
        self.init = initializations.get(init)
        self.inner_init = initializations.get(inner_init)
        self.beta_init = initializations.get(beta_init)
        self.gamma_init = initializations.get(gamma_init)
        self.W_regularizer = regularizers.get(W_regularizer)
        self.U_regularizer = regularizers.get(U_regularizer)
        self.b_regularizer = regularizers.get(b_regularizer)
        self.gamma_regularizer = regularizers.get(gamma_regularizer)
        self.beta_regularizer = regularizers.get(beta_regularizer)
        self.dropout_W = dropout_W
        self.dropout_U = dropout_U
        self.epsilon = 1e-5
        if self.dropout_W or self.dropout_U:
            self.uses_learning_phase = True
        super(LN_SimpleRNN, self).__init__(**kwargs)
Project: NeuralSentenceOrdering    Author: FudanNLP
def __init__(self, input_dim, output_dim,
                 init='glorot_uniform', inner_init='orthogonal',
                 forget_bias_init='one', activation='tanh',
                 inner_activation='hard_sigmoid'):
        #self.input_dim = input_dim
        self.output_dim = int(output_dim / 2)
        self.init = initializations.get(init)
        self.inner_init = initializations.get(inner_init)
        self.forget_bias_init = initializations.get(forget_bias_init)
        self.activation = activations.get(activation)
        self.inner_activation = activations.get(inner_activation)

        self.input_dim = input_dim
        #self.input = K.placeholder(input_shape)

        # initial states: 2 all-zero tensors of shape (output_dim)
        self.forward_lstm = LSTM(input_dim = input_dim, output_dim = self.output_dim)
        self.backward_lstm = LSTM(input_dim = input_dim, output_dim = self.output_dim)

        self.params = self.forward_lstm.params + self.backward_lstm.params

        #if self.initial_weights is not None:
        #    self.set_weights(self.initial_weights)
        #    del self.initial_weights
Project: ikelos    Author: braingineer
def __init__(self, output_dim,
                 init='glorot_uniform', inner_init='orthogonal',
                 forget_bias_init='one', activation='tanh',
                 inner_activation='hard_sigmoid',
                 W_regularizer=None, U_regularizer=None, b_regularizer=None,
                 dropout_W=0., dropout_U=0., **kwargs):
        self.output_dim = output_dim
        self.init = initializations.get(init)
        self.inner_init = initializations.get(inner_init)
        self.forget_bias_init = initializations.get(forget_bias_init)
        self.activation = activations.get(activation)
        self.inner_activation = activations.get(inner_activation)
        self.W_regularizer = regularizers.get(W_regularizer)
        self.U_regularizer = regularizers.get(U_regularizer)
        self.b_regularizer = regularizers.get(b_regularizer)
        self.dropout_W, self.dropout_U = dropout_W, dropout_U

        if self.dropout_W or self.dropout_U:
            self.uses_learning_phase = True
        super(DualCurrent, self).__init__(**kwargs)
Project: ikelos    Author: braingineer
def __init__(self, output_dim,
                 init='glorot_uniform', inner_init='orthogonal',
                 activation='tanh', inner_activation='hard_sigmoid',
                 W_regularizer=None, U_regularizer=None, b_regularizer=None,
                 shape_key=None, dropout_W=0., dropout_U=0., **kwargs):
        self.output_dim = output_dim
        self.init = initializations.get(init)
        self.inner_init = initializations.get(inner_init)
        self.activation = activations.get(activation)
        self.inner_activation = activations.get(inner_activation)
        self.W_regularizer = regularizers.get(W_regularizer)
        self.U_regularizer = regularizers.get(U_regularizer)
        self.b_regularizer = regularizers.get(b_regularizer)
        self.dropout_W, self.dropout_U = dropout_W, dropout_U
        self.shape_key = shape_key or {}

        if self.dropout_W or self.dropout_U:
            self.uses_learning_phase = True
        kwargs['consume_less'] = 'gpu'
        super(RTTN, self).__init__(**kwargs)

        self.num_actions = 4
Project: keras_bn_library    Author: bnsnapper
def __init__(self, output_dim,
                 init='glorot_uniform', inner_init='orthogonal',
                 forget_bias_init='one', activation='tanh',
                 inner_activation='hard_sigmoid',
                 W_regularizer=None, U_regularizer=None, b_regularizer=None,
                 dropout_W=0., dropout_U=0., **kwargs):

        self.output_dim = output_dim
        self.init = initializations.get(init)
        self.inner_init = initializations.get(inner_init)
        self.forget_bias_init = initializations.get(forget_bias_init)
        self.activation = activations.get(activation)
        self.inner_activation = activations.get(inner_activation)
        self.W_regularizer = regularizers.get(W_regularizer)
        self.U_regularizer = regularizers.get(U_regularizer)
        self.b_regularizer = regularizers.get(b_regularizer)
        self.dropout_W, self.dropout_U = dropout_W, dropout_U

        if self.dropout_W or self.dropout_U:
            self.uses_learning_phase = True
        super(DecoderVaeLSTM, self).__init__(**kwargs)
Project: keras_bn_library    Author: bnsnapper
def __init__(self, output_dim,
                 init='glorot_uniform', inner_init='orthogonal',
                 forget_bias_init='one', activation='tanh', inner_activation='hard_sigmoid',
                 W_regularizer=None, U_regularizer=None, b_regularizer=None,
                 dropout_W=0., dropout_U=0., **kwargs):
        self.output_dim = output_dim
        self.init = initializations.get(init)
        self.inner_init = initializations.get(inner_init)
        self.forget_bias_init = initializations.get(forget_bias_init)
        self.activation = activations.get(activation)
        self.inner_activation = activations.get(inner_activation)
        self.W_regularizer = regularizers.get(W_regularizer)
        self.U_regularizer = regularizers.get(U_regularizer)
        self.b_regularizer = regularizers.get(b_regularizer)
        self.dropout_W = dropout_W
        self.dropout_U = dropout_U
        self.stateful = False

        if self.dropout_W or self.dropout_U:
            self.uses_learning_phase = True
        super(QRNN, self).__init__(**kwargs)
Project: onto-lstm    Author: pdasigi
def __init__(self, score_dim=1, num_hidden_layers=0, proj_dim=None, init='uniform', composition_type='HPCT',
                 **kwargs):
        self.composition_type = composition_type
        self.supports_masking = True
        self.num_hidden_layers = num_hidden_layers
        self.proj_dim = proj_dim
        self.init = initializations.get(init)
        self.proj_head = None
        self.proj_prep = None
        self.proj_child = None
        self.scorer = None
        self.hidden_layers = []
        self.score_dim = score_dim
        self.allowed_compositions = []
        super(PrepositionPredictor, self).__init__(**kwargs)
Project: onto-lstm    Author: pdasigi
def __init__(self, init='uniform', projection_dim=50, weights=None, **kwargs):
        self.intra_attention_weights = weights
        self.init = initializations.get(init)
        self.projection_dim = projection_dim
        super(IntraAttention, self).__init__(**kwargs)
Project: recurrent-attention-for-QA-SQUAD-based-on-keras    Author: wentaozhu
def __init__(self, h, output_dim,
                 init='glorot_uniform', **kwargs):
        self.init = initializations.get(init)
        self.h = h
        self.output_dim = output_dim
        #removing the regularizers and the dropout
        super(AttenLayer, self).__init__(**kwargs)
        # this seems necessary in order to accept 3 input dimensions
        # (samples, timesteps, features)
        self.input_spec=[InputSpec(ndim=3)]
Project: keras-utilities    Author: cbaziotis
def __init__(self,
                 W_regularizer=None, b_regularizer=None,
                 W_constraint=None, b_constraint=None,
                 bias=True, **kwargs):
        """
        Keras Layer that implements an Attention mechanism for temporal data.
        Supports Masking.
        Follows the work of Raffel et al. [https://arxiv.org/abs/1512.08756]
        # Input shape
            3D tensor with shape: `(samples, steps, features)`.
        # Output shape
            2D tensor with shape: `(samples, features)`.
        :param kwargs:

        Just put it on top of an RNN Layer (GRU/LSTM/SimpleRNN) with return_sequences=True.
        The dimensions are inferred based on the output shape of the RNN.
        Example:
            model.add(LSTM(64, return_sequences=True))
            model.add(Attention())
        """
        self.supports_masking = True
        self.init = initializations.get('glorot_uniform')

        self.W_regularizer = regularizers.get(W_regularizer)
        self.b_regularizer = regularizers.get(b_regularizer)

        self.W_constraint = constraints.get(W_constraint)
        self.b_constraint = constraints.get(b_constraint)

        self.bias = bias
        super(Attention, self).__init__(**kwargs)
Project: dense_tensor    Author: bstriner
def get_initializer(initializer):
    if keras_2:
        from keras import initializers
        return initializers.get(initializer)
    else:
        from keras import initializations
        return initializations.get(initializer)
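A short usage sketch for the compatibility helper above; keras_2 is assumed to be a boolean defined elsewhere in the project (e.g. derived from keras.__version__), and the weight shape is illustrative.

# Resolve the same identifier under either Keras version.
init = get_initializer('glorot_uniform')

# Keras 1.x: init is a function   f(shape, name=None) -> variable
# Keras 2.x: init is an Initializer object, called as init(shape)
W = init((128, 64))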
Project: textClassifier    Author: richliao
def __init__(self, **kwargs):
        self.init = initializations.get('normal')
        #self.input_spec = [InputSpec(ndim=3)]
        super(AttLayer, self).__init__(**kwargs)
Project: textClassifier    Author: richliao
def __init__(self, **kwargs):
        self.init = initializations.get('normal')
        #self.input_spec = [InputSpec(ndim=3)]
        super(AttLayer, self).__init__(**kwargs)
Project: ppap    Author: unique-horn
def __init__(self,
               input_channels,
               rows, cols,
               output_shape,
               num_filters,
               hidden_dim,
               init="glorot_uniform"):
    """
    Parameters
    ----------
    output_shape : list_like
        Size of the generated matrix (x, y)
    hidden_dim : int
        Number of nodes in the hidden layer
    init : str
        Keras initializer to use for weights
    """
    self.input_rows = rows
    self.input_cols = cols
    self.input_channels = input_channels
    self.num_filters = num_filters
    self.output_shape = output_shape
    self.hidden_dim = hidden_dim
    self.init = initializations.get(init)
    self.bias_init = initializations.get("zero")
    self.setup_weights()
    self.num_param = np.prod(self.output_shape) * self.num_filters * \
                     self.input_channels
Project: ppap    Author: unique-horn
def __init__(self,
                 input_channels,
                 output_shape,
                 num_filters,
                 hidden_dim,
                 init="glorot_uniform"):
        """
        Parameters
        ----------
        output_shape : list_like
            Size of the generated matrix (x, y)
        hidden_dim : int
            Number of nodes in the hidden layer
        init : str
            Keras initializer to use for weights
        """
        self.input_channels = input_channels
        self.num_filters = num_filters
        self.output_shape = output_shape
        self.hidden_dim = hidden_dim
        self.init = initializations.get(init)
        self.bias_init = initializations.get("zero")

        self.setup_weights()
        self.setup_output()
        self.num_param = np.prod(self.output_shape) * self.num_filters * \
                         self.input_channels
Project: ppap    Author: unique-horn
def __init__(self,
                 output_shape,
                 z_dim,
                 layer_sizes,
                 scale,
                 init="glorot_uniform"):
        """
        Parameters
        ----------
        output_shape : list_like
            Size of the generated matrix (x, y)
        z_dim : int
            Size of the input z vector
        layer_sizes : list_like
            List of nodes in hidden layers
        scale : float
            Scale used for generating the coordinate matrix
            (see get_coordinates* functions)
        init : str
            Keras initializer to use for weights
        """

        self.output_shape = output_shape
        self.layer_sizes = layer_sizes
        self.z_dim = z_dim
        self.init = initializations.get(init)
        self.bias_init = initializations.get("zero")
        self.scale = scale

        self.setup_weights()
        self.setup_output()
Project: neural-style-keras    Author: robertomest
def __init__(self, epsilon=1e-5, weights=None,
                 beta_init='zero', gamma_init='one', **kwargs):
        self.beta_init = initializations.get(beta_init)
        self.gamma_init = initializations.get(gamma_init)
        self.epsilon = epsilon
        super(InstanceNormalization, self).__init__(**kwargs)
Project: KAGGLE_CERVICAL_CANCER_2017    Author: ZFTurbo
def __init__(self, weights=None, axis=-1, momentum = 0.9, beta_init='zero', gamma_init='one', **kwargs):
        self.momentum = momentum
        self.axis = axis
        self.beta_init = initializations.get(beta_init)
        self.gamma_init = initializations.get(gamma_init)
        self.initial_weights = weights
        super(Scale, self).__init__(**kwargs)
Project: sciDT    Author: edvisees
def __init__(self, output_dim,
                 init='glorot_uniform', activation='linear', weights=None,
                 W_regularizer=None, b_regularizer=None, activity_regularizer=None,
                 W_constraint=None, b_constraint=None,
                 input_dim=None, input_length1=None, input_length2=None, **kwargs):
        self.output_dim = output_dim
        self.init = initializations.get(init)
        self.activation = activations.get(activation)

        self.W_regularizer = regularizers.get(W_regularizer)
        self.b_regularizer = regularizers.get(b_regularizer)
        self.activity_regularizer = regularizers.get(activity_regularizer)

        self.W_constraint = constraints.get(W_constraint)
        self.b_constraint = constraints.get(b_constraint)
        self.constraints = [self.W_constraint, self.b_constraint]

        self.initial_weights = weights

        self.input_dim = input_dim
        self.input_length1 = input_length1
        self.input_length2 = input_length2
        if self.input_dim:
            kwargs['input_shape'] = (self.input_length1, self.input_length2, self.input_dim)
        self.input = K.placeholder(ndim=4)
        super(HigherOrderTimeDistributedDense, self).__init__(**kwargs)
Project: sciDT    Author: edvisees
def __init__(self, input_shape, context='word', init='glorot_uniform', activation='tanh', weights=None, **kwargs):
    self.init = initializations.get(init)
    self.activation = activations.get(activation)
    self.context = context
    self.td1, self.td2, self.wd = input_shape
    self.initial_weights = weights
    kwargs['input_shape'] = input_shape
    super(TensorAttention, self).__init__(**kwargs)
Project: nli_generation    Author: jstarc
def __init__(self, output_dim, init='glorot_uniform', **kwargs):
        self.init = initializations.get(init)
        self.output_dim = output_dim

        def hshape(n):
            from math import sqrt, ceil
            l1 = ceil(sqrt(n))
            l2 = ceil(n / l1)
            return int(l1), int(l2)

        self.n_classes, self.n_outputs_per_class = hshape(output_dim)
        super(HierarchicalSoftmax, self).__init__(**kwargs)
Project: HighwayNetwork    Author: trangptm
def __init__(self, nb_filter, nb_row, nb_col, transform_bias=-1,
                 init='glorot_uniform', activation='relu', weights=None,
                 border_mode='same', subsample=(1, 1), dim_ordering='th',
                 W_regularizer=None, b_regularizer=None, activity_regularizer=None,
                 W_constraint=None, b_constraint=None,
                 bias=True, **kwargs):

        if border_mode not in {'valid', 'same'}:
            raise Exception('Invalid border mode for Convolution2D:', border_mode)
        self.nb_filter = nb_filter
        self.nb_row = nb_row
        self.nb_col = nb_col
        self.transform_bias = transform_bias
        self.init = initializations.get(init, dim_ordering=dim_ordering)
        self.activation = activations.get(activation)
        assert border_mode in {'valid', 'same'}, 'border_mode must be in {valid, same}'
        self.border_mode = border_mode
        self.subsample = tuple(subsample)
        assert dim_ordering in {'tf', 'th'}, 'dim_ordering must be in {tf, th}'
        self.dim_ordering = dim_ordering

        self.W_regularizer = regularizers.get(W_regularizer)
        self.b_regularizer = regularizers.get(b_regularizer)
        self.activity_regularizer = regularizers.get(activity_regularizer)

        self.W_constraint = constraints.get(W_constraint)
        self.b_constraint = constraints.get(b_constraint)

        self.bias = bias
        self.input_spec = [InputSpec(ndim=4)]
        self.initial_weights = weights
        super(Conv2DHighway, self).__init__(**kwargs)
Project: cnn_finetune    Author: flyyufelix
def __init__(self, weights=None, axis=-1, momentum = 0.9, beta_init='zero', gamma_init='one', **kwargs):
        self.momentum = momentum
        self.axis = axis
        self.beta_init = initializations.get(beta_init)
        self.gamma_init = initializations.get(gamma_init)
        self.initial_weights = weights
        super(Scale, self).__init__(**kwargs)
Project: keras-prednet    Author: kunimasa-kawasaki
def __init__(self, nb_filter, nb_row, nb_col,
                 init='glorot_uniform', inner_init='orthogonal',
                 forget_bias_init='one', activation='tanh',
                 inner_activation='hard_sigmoid', dim_ordering="tf",
                 border_mode="valid", sub_sample=(1, 1),
                 W_regularizer=None, U_regularizer=None, b_regularizer=None,
                 dropout_W=0., dropout_U=0., **kwargs):
        self.nb_filter = nb_filter
        self.nb_row = nb_row
        self.nb_col = nb_col
        self.init = initializations.get(init)
        self.inner_init = initializations.get(inner_init)
        self.forget_bias_init = initializations.get(forget_bias_init)
        self.activation = activations.get(activation)
        self.inner_activation = activations.get(inner_activation)
        self.border_mode = border_mode
        self.subsample = sub_sample

        assert dim_ordering in {'tf', 'th'}, 'dim_ordering must be in {tf, th}'
        self.dim_ordering = dim_ordering

        kwargs["nb_filter"] = nb_filter
        kwargs["nb_row"] = nb_row
        kwargs["nb_col"] = nb_col
        kwargs["dim_ordering"] = dim_ordering

        self.W_regularizer = W_regularizer
        self.U_regularizer = U_regularizer
        self.b_regularizer = b_regularizer
        self.dropout_W, self.dropout_U = dropout_W, dropout_U

        super(LSTMConv2D, self).__init__(**kwargs)
Project: c2w2c    Author: milankinen
def __init__(self, output_dim, weights=None, activation='linear', return_mask=True, **kwargs):
    self.supports_masking = True
    self.output_dim       = output_dim
    self.init             = initializations.get('glorot_uniform')
    self.activation       = activations.get(activation)
    self.initial_weights  = weights
    self.return_mask      = return_mask
    super(Projection, self).__init__(**kwargs)
Project: kaggle_airbnb    Author: svegapons
def on_epoch_end(self, epoch, logs={}):
        current = logs.get(self.monitor)

        if self.monitor_op(current, self.best):
            self.best = current
            self.best_epoch = epoch
            self.wait = 0
        else:
            if self.wait >= self.patience:
                if self.verbose > 0:
                    print('Epoch %05d: early stopping' % (epoch))
                self.model.stop_training = True
            self.wait += 1
Project: kaggle_airbnb    Author: svegapons
def __init__(self, init='zero', weights=None, **kwargs):
        self.init = initializations.get(init)
        self.initial_weights = weights
        self.alphas = None
        super(MyPReLU, self).__init__(**kwargs)
Project: KerasCog    Author: ABAtanasov
def __init__(self, output_dim, 
                 init = 'glorot_uniform', inner_init = 'orthogonal',
                 activation = 'tanh', W_regularizer = None, 
                 U_regularizer = None, b_regularizer = None, 
                 dropout_W = 0.0, dropout_U = 0.0,
                 tau=100, dt=20, noise=.1,
                 dale_ratio = None, **kwargs):
        self.output_dim = output_dim
        self.init = initializations.get(init)
        self.inner_init = initializations.get(inner_init)
        self.activation = activations.get(activation)
        self.W_regularizer = regularizers.get(W_regularizer)
        self.U_regularizer = regularizers.get(U_regularizer)
        self.b_regularizer = regularizers.get(b_regularizer)
        self.dropout_W, self.dropout_U = dropout_W, dropout_U
        self.tau = tau
        self.dt = dt
        self.noise = noise
        self.dale_ratio = dale_ratio
        if dale_ratio:

            # make Dale's law matrix
            dale_vec = np.ones(output_dim)
            dale_vec[int(dale_ratio*output_dim):] = -1
            dale = np.diag(dale_vec)
            self.Dale = K.variable(dale)
        if self.dropout_W or self.dropout_U:
            self.uses_learning_phase = True
        super(leak_recurrent, self).__init__(**kwargs)
Project: KerasCog    Author: ABAtanasov
def __init__(self, output_dim, init='glorot_uniform',
                 activation='linear', weights=None,
                 W_regularizer=None, b_regularizer=None, activity_regularizer=None,
                 W_constraint=None, b_constraint=None,
                 bias=False, input_dim=None, dale_ratio = .8, **kwargs):
        self.init = initializations.get(init)
        self.activation = activations.get(activation)
        self.output_dim = output_dim
        self.input_dim = input_dim

        self.W_regularizer = regularizers.get(W_regularizer)
        self.b_regularizer = regularizers.get(b_regularizer)
        self.activity_regularizer = regularizers.get(activity_regularizer)

        self.W_constraint = constraints.get(W_constraint)
        self.b_constraint = constraints.get(b_constraint)

        self.bias = bias
        self.initial_weights = weights
        self.input_spec = [InputSpec(ndim=2)]

        # OUR CHANGE
        self.dale_ratio = dale_ratio
        if dale_ratio:
            dale_vec = np.ones((input_dim, 1))
            dale_vec[int(dale_ratio*input_dim):, 0] = 0
            self.Dale = K.variable(dale_vec)

        if self.input_dim:
            kwargs['input_shape'] = (self.input_dim,)

        super(Dense, self).__init__(**kwargs)
Project: head-segmentation    Author: szywind
def __init__(self, weights=None, axis=-1, momentum=0.9, beta_init='zero', gamma_init='one', **kwargs):
        self.momentum = momentum
        self.axis = axis
        self.beta_init = initializations.get(beta_init)
        self.gamma_init = initializations.get(gamma_init)
        self.initial_weights = weights
        super(Scale, self).__init__(**kwargs)
Project: neural_style    Author: metaflow-ai
def __init__(self, nb_filter, nb_row, nb_col, rate=2,
                 init='glorot_uniform', activation='linear', weights=None,
                 border_mode='valid', dim_ordering=K.image_dim_ordering(),
                 W_regularizer=None, b_regularizer=None, activity_regularizer=None,
                 W_constraint=None, b_constraint=None,
                 bias=True, **kwargs):
        if K._BACKEND != 'tensorflow':
            raise Exception('ATrousConvolution2D only works '
                            'with the TensorFlow backend.')

        if border_mode not in {'valid', 'same'}:
            raise Exception('Invalid border mode for Convolution2D:', border_mode)
        self.nb_filter = nb_filter
        self.nb_row = nb_row
        self.nb_col = nb_col
        self.rate = rate
        self.init = initializations.get(init, dim_ordering=dim_ordering)
        self.activation = activations.get(activation)
        assert border_mode in {'valid', 'same'}, 'border_mode must be in {valid, same}'
        self.border_mode = border_mode
        assert dim_ordering in {'tf', 'th'}, 'dim_ordering must be in {tf, th}'
        self.dim_ordering = dim_ordering

        self.W_regularizer = regularizers.get(W_regularizer)
        self.b_regularizer = regularizers.get(b_regularizer)
        self.activity_regularizer = regularizers.get(activity_regularizer)

        self.W_constraint = constraints.get(W_constraint)
        self.b_constraint = constraints.get(b_constraint)

        self.bias = bias
        self.input_spec = [InputSpec(ndim=4)]
        self.initial_weights = weights
        super(ATrousConvolution2D, self).__init__(**kwargs)
Project: neural_style    Author: metaflow-ai
def __init__(self, nb_filter, nb_row, nb_col,
                 init='glorot_uniform', activation='linear', weights=None,
                 border_mode='valid', subsample=(1, 1), dim_ordering=K.image_dim_ordering(),
                 W_regularizer=None, b_regularizer=None, activity_regularizer=None,
                 W_constraint=None, b_constraint=None,
                 bias=True, **kwargs):

        if border_mode not in {'valid', 'same'}:
            raise Exception('Invalid border mode for Convolution2D:', border_mode)
        self.nb_filter = nb_filter
        self.nb_row = nb_row
        self.nb_col = nb_col
        self.dim_ordering = dim_ordering
        self.init = initializations.get(init, dim_ordering=self.dim_ordering)
        self.activation = activations.get(activation)
        assert border_mode in {'valid', 'same'}, 'border_mode must be in {valid, same}'
        self.border_mode = border_mode
        self.subsample = tuple(subsample)


        self.W_regularizer = regularizers.get(W_regularizer)
        self.b_regularizer = regularizers.get(b_regularizer)
        self.activity_regularizer = regularizers.get(activity_regularizer)

        self.W_constraint = constraints.get(W_constraint)
        self.b_constraint = constraints.get(b_constraint)

        self.bias = bias
        self.input_spec = [InputSpec(ndim=4)]
        self.initial_weights = weights

        super(ConvolutionTranspose2D, self).__init__(**kwargs)
Project: audit-log-detection    Author: twosixlabs
def __init__(self, epsilon=1e-6, mode=0, axis=-1, momentum=0.9,
                 weights=None, beta_init='zero', gamma_init='one', **kwargs):
        self.beta_init = initializations.get(beta_init)
        self.gamma_init = initializations.get(gamma_init)
        self.epsilon = epsilon
        self.mode = mode
        self.axis = axis
        self.momentum = momentum
        self.initial_weights = weights
        if self.mode == 0:
            self.uses_learning_phase = True
        super(BatchNormalization, self).__init__(**kwargs)
Project: Keras-CNN-QA    Author: shashankg7
def __init__(self, output_dim, **kwargs):
        self.output_dim = output_dim
        self.init = initializations.get('glorot_uniform')
        super(SimLayer, self).__init__(**kwargs)