Python lasagne.layers module: concat() example source code

We extracted the following 6 code examples from open-source Python projects to illustrate how to use lasagne.layers.concat().
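Before the project excerpts, here is a minimal, self-contained sketch of what lasagne.layers.concat() (an alias of ConcatLayer) does: it joins the outputs of several layers along a given axis, axis=1 by default. The layer names below are illustrative and not taken from any of the projects.

import lasagne.layers as L

# two inputs with the same batch dimension but different feature sizes
l_in_a = L.InputLayer(shape=(None, 3))
l_in_b = L.InputLayer(shape=(None, 5))

# concatenate along the feature axis (axis=1 is the default)
l_cat = L.concat([l_in_a, l_in_b], axis=1)

print(L.get_output_shape(l_cat))  # (None, 8)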

Project: third_person_im    Author: bstadie    | project source | file source
def get_params_internal(self, **tags):  # this gives ALL the vars (not the param values)
    return L.get_all_params(  # this lasagne function also returns all vars below the passed layers
        L.concat(self._output_layers),
        **tags
    )
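A short note on the snippet above (the same code recurs in the rllabplusplus, rllab, and maml_rl excerpts below): L.get_all_params() collects the parameters of the graph below the layer(s) it is given, so wrapping several output layers in L.concat() yields a single root layer from which every branch is reachable. A minimal sketch with hypothetical layer names:

import lasagne.layers as L

l_in = L.InputLayer(shape=(None, 10))
head_a = L.DenseLayer(l_in, num_units=4)   # first output branch
head_b = L.DenseLayer(l_in, num_units=4)   # second output branch

merged = L.concat([head_a, head_b])        # single root covering both branches
params = L.get_all_params(merged, trainable=True)
# params now holds the W and b of both DenseLayers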
Project: rllabplusplus    Author: shaneshixiang    | project source | file source
def get_params_internal(self, **tags):  # this gives ALL the vars (not the param values)
    return L.get_all_params(  # this lasagne function also returns all vars below the passed layers
        L.concat(self._output_layers),
        **tags
    )
Project: EAC-Net    Author: wiibrew    | project source | file source
# Imports assumed for this excerpt (the original file's imports are not shown);
# ConvLayer is taken here to be lasagne's Conv2DLayer.
from lasagne.layers import (InputLayer, SliceLayer, Upscale2DLayer, DenseLayer,
                            DropoutLayer, concat)
from lasagne.layers import Conv2DLayer as ConvLayer
from lasagne.nonlinearities import sigmoid


def build_model():
    net = {}
    net['input'] = InputLayer((None, 512 * 20, 3, 3))

    au_fc_layers = []
    for i in range(20):
        # slice out the 512 channels belonging to the i-th region of interest
        net['roi_AU_N_' + str(i)] = SliceLayer(net['input'], indices=slice(i * 512, (i + 1) * 512), axis=1)

        # try adding upsampling here for more conv layers
        net['Roi_upsample_' + str(i)] = Upscale2DLayer(net['roi_AU_N_' + str(i)], scale_factor=2)

        net['conv_roi_' + str(i)] = ConvLayer(net['Roi_upsample_' + str(i)], 512, 3)

        net['au_fc_' + str(i)] = DenseLayer(net['conv_roi_' + str(i)], num_units=150)

        au_fc_layers += [net['au_fc_' + str(i)]]

    # concatenate the 20 per-ROI fully connected layers into one feature vector
    net['local_fc'] = concat(au_fc_layers)
    net['local_fc2'] = DenseLayer(net['local_fc'], num_units=2048)

    net['local_fc_dp'] = DropoutLayer(net['local_fc2'], p=0.5)

    # net['fc_comb'] = concat([net['au_fc_layer'], net['local_fc_dp']])

    # net['fc_dense'] = DenseLayer(net['fc_comb'], num_units=1024)

    # net['fc_dense_dp'] = DropoutLayer(net['fc_dense'], p=0.3)

    net['real_out'] = DenseLayer(net['local_fc_dp'], num_units=12, nonlinearity=sigmoid)

    # net['final'] = concat([net['pred_pos_layer'], net['output_layer']])

    return net
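For context, a minimal sketch (not part of the EAC-Net source) of how a layer dictionary like the one returned above is typically consumed in lasagne; the names batch and predict_fn are illustrative:

import numpy as np
import theano
import lasagne.layers as L

net = build_model()

# deterministic=True disables the dropout layer at prediction time
prediction = L.get_output(net['real_out'], deterministic=True)
predict_fn = theano.function([net['input'].input_var], prediction)

batch = np.random.rand(2, 512 * 20, 3, 3).astype(theano.config.floatX)
print(predict_fn(batch).shape)  # (2, 12): one sigmoid score per output unit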
Project: BiDNN    Author: v-v    | project source | file source
# Assumed module-level imports for this excerpt:
#   from lasagne.layers import InputLayer, DenseLayer, DropoutLayer, concat
#   from lasagne.init import GlorotUniform
def __create_toplogy__(self, input_var_first=None, input_var_second=None):
    # define network topology
    if self.conf.rep % 2 != 0:
        raise ValueError("Representation size should be divisible by two as it's formed by combining two crossmodal translations", self.conf.rep)

    # input layers
    l_in_first  = InputLayer(shape=(self.conf.batch_size, self.conf.mod1size), input_var=input_var_first)
    l_in_second = InputLayer(shape=(self.conf.batch_size, self.conf.mod2size), input_var=input_var_second)

    # first -> second translation (encoder followed by decoder)
    l_hidden1_first   = DenseLayer(l_in_first, num_units=self.conf.hdn, nonlinearity=self.conf.act, W=GlorotUniform())         # enc1
    l_hidden2_first   = DenseLayer(l_hidden1_first, num_units=self.conf.rep//2, nonlinearity=self.conf.act, W=GlorotUniform()) # enc2
    l_hidden2_first_d = DropoutLayer(l_hidden2_first, p=self.conf.dropout)
    l_hidden3_first   = DenseLayer(l_hidden2_first_d, num_units=self.conf.hdn, nonlinearity=self.conf.act, W=GlorotUniform())    # dec1
    l_out_first       = DenseLayer(l_hidden3_first, num_units=self.conf.mod2size, nonlinearity=self.conf.act, W=GlorotUniform()) # dec2

    # second -> first translation
    if self.conf.untied:
        # FREE (untied) weights
        l_hidden1_second   = DenseLayer(l_in_second, num_units=self.conf.hdn, nonlinearity=self.conf.act, W=GlorotUniform())         # enc1
        l_hidden2_second   = DenseLayer(l_hidden1_second, num_units=self.conf.rep//2, nonlinearity=self.conf.act, W=GlorotUniform()) # enc2
        l_hidden2_second_d = DropoutLayer(l_hidden2_second, p=self.conf.dropout)
        l_hidden3_second   = DenseLayer(l_hidden2_second_d, num_units=self.conf.hdn, nonlinearity=self.conf.act, W=GlorotUniform())    # dec1
        l_out_second       = DenseLayer(l_hidden3_second, num_units=self.conf.mod1size, nonlinearity=self.conf.act, W=GlorotUniform()) # dec2
    else:
        # TIED middle weights (shared with the first -> second translation)
        l_hidden1_second   = DenseLayer(l_in_second, num_units=self.conf.hdn, nonlinearity=self.conf.act, W=GlorotUniform())             # enc1
        l_hidden2_second   = DenseLayer(l_hidden1_second, num_units=self.conf.rep//2, nonlinearity=self.conf.act, W=l_hidden3_first.W.T) # enc2
        l_hidden2_second_d = DropoutLayer(l_hidden2_second, p=self.conf.dropout)
        l_hidden3_second   = DenseLayer(l_hidden2_second_d, num_units=self.conf.hdn, nonlinearity=self.conf.act, W=l_hidden2_first.W.T) # dec1
        l_out_second       = DenseLayer(l_hidden3_second, num_units=self.conf.mod1size, nonlinearity=self.conf.act, W=GlorotUniform())  # dec2

    # concatenate both crossmodal reconstructions into a single output layer
    l_out = concat([l_out_first, l_out_second])

    return l_out, l_hidden2_first, l_hidden2_second
Project: rllab    Author: rll    | project source | file source
def get_params_internal(self, **tags):  # this gives ALL the vars (not the param values)
    return L.get_all_params(  # this lasagne function also returns all vars below the passed layers
        L.concat(self._output_layers),
        **tags
    )
Project: maml_rl    Author: cbfinn    | project source | file source
def get_params_internal(self, **tags):  # this gives ALL the vars (not the param values)
    return L.get_all_params(  # this lasagne function also returns all vars below the passed layers
        L.concat(self._output_layers),
        **tags
    )