Python chainer.functions module: clipped_relu() example source code

The following 25 code examples, collected from open-source Python projects, illustrate how to use chainer.functions.clipped_relu(). The function computes min(max(0, x), z) elementwise: a ReLU whose output is capped at the threshold z (default 20.0).
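As a quick orientation before the project excerpts, here is a minimal usage sketch (the input array and the z value are invented for illustration):

import numpy as np
import chainer.functions as F

# Values below 0 are clipped to 0; values above z are clipped to z.
x = np.array([-1.0, 0.5, 3.0], dtype=np.float32)
y = F.clipped_relu(x, z=1.0)
print(y.data)  # [0.  0.5 1. ]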

Project: ddnn | Author: kunglab
def to_function(self):
        # Map the configured nonlinearity name to the matching activation
        # wrapper; the bare names below are thin callable classes defined
        # elsewhere in this repo, not the chainer.functions entry points.
        if self.nonlinearity.lower() == "clipped_relu":
            return clipped_relu()
        if self.nonlinearity.lower() == "crelu":
            return crelu()
        if self.nonlinearity.lower() == "elu":
            return elu()
        if self.nonlinearity.lower() == "hard_sigmoid":
            return hard_sigmoid()
        if self.nonlinearity.lower() == "leaky_relu":
            return leaky_relu()
        if self.nonlinearity.lower() == "relu":
            return relu()
        if self.nonlinearity.lower() == "sigmoid":
            return sigmoid()
        if self.nonlinearity.lower() == "softmax":
            return softmax()
        if self.nonlinearity.lower() == "softplus":
            return softplus()
        if self.nonlinearity.lower() == "tanh":
            return tanh()
        if self.nonlinearity.lower() == "bst":
            return bst()
        raise NotImplementedError()
Project: unrolled-gan | Author: musyoku
def to_function(self):
        if self.nonlinearity.lower() == "clipped_relu":
            return clipped_relu()
        if self.nonlinearity.lower() == "crelu":
            return crelu()
        if self.nonlinearity.lower() == "elu":
            return elu()
        if self.nonlinearity.lower() == "hard_sigmoid":
            return hard_sigmoid()
        if self.nonlinearity.lower() == "leaky_relu":
            return leaky_relu()
        if self.nonlinearity.lower() == "relu":
            return relu()
        if self.nonlinearity.lower() == "sigmoid":
            return sigmoid()
        if self.nonlinearity.lower() == "softmax":
            return softmax()
        if self.nonlinearity.lower() == "softplus":
            return softplus()
        if self.nonlinearity.lower() == "tanh":
            return tanh()
        raise NotImplementedError()
Project: SeRanet | Author: corochann
def __call__(self, x, t=None):
        self.clear()
        #x = Variable(x_data)  # x_data.astype(np.float32)

        h = F.leaky_relu(self.conv1(x), slope=0.1)
        h = F.leaky_relu(self.conv2(h), slope=0.1)
        h = F.leaky_relu(self.conv3(h), slope=0.1)
        h = F.leaky_relu(self.conv4(h), slope=0.1)
        h = F.leaky_relu(self.conv5(h), slope=0.1)
        h = F.leaky_relu(self.conv6(h), slope=0.1)
        h = F.clipped_relu(self.conv7(h), z=1.0)  # cap outputs to the [0, 1] pixel range
        if self.train:
            self.loss = F.mean_squared_error(h, t)
            return self.loss
        else:
            return h
Project: LSGAN | Author: musyoku
def to_function(self):
        if self.nonlinearity.lower() == "clipped_relu":
            return clipped_relu()
        if self.nonlinearity.lower() == "crelu":
            return crelu()
        if self.nonlinearity.lower() == "elu":
            return elu()
        if self.nonlinearity.lower() == "hard_sigmoid":
            return hard_sigmoid()
        if self.nonlinearity.lower() == "leaky_relu":
            return leaky_relu()
        if self.nonlinearity.lower() == "relu":
            return relu()
        if self.nonlinearity.lower() == "sigmoid":
            return sigmoid()
        if self.nonlinearity.lower() == "softmax":
            return softmax()
        if self.nonlinearity.lower() == "softplus":
            return softplus()
        if self.nonlinearity.lower() == "tanh":
            return tanh()
        raise NotImplementedError()
Project: adgm | Author: musyoku
def to_function(self):
        if self.nonlinearity.lower() == "clipped_relu":
            return clipped_relu()
        if self.nonlinearity.lower() == "crelu":
            return crelu()
        if self.nonlinearity.lower() == "elu":
            return elu()
        if self.nonlinearity.lower() == "hard_sigmoid":
            return hard_sigmoid()
        if self.nonlinearity.lower() == "leaky_relu":
            return leaky_relu()
        if self.nonlinearity.lower() == "relu":
            return relu()
        if self.nonlinearity.lower() == "sigmoid":
            return sigmoid()
        if self.nonlinearity.lower() == "softmax":
            return softmax()
        if self.nonlinearity.lower() == "softplus":
            return softplus()
        if self.nonlinearity.lower() == "tanh":
            return tanh()
        raise NotImplementedError()
Project: chainer-speech-recognition | Author: musyoku
def __call__(self, x):
        return functions.clipped_relu(x, self.z)
Project: chainer-deconv | Author: germanRos
def check_forward(self, x_data):
        x = chainer.Variable(x_data)
        y = functions.clipped_relu(x, self.z)
        self.assertEqual(y.data.dtype, self.dtype)

        y_expect = self.x.copy()
        for i in numpy.ndindex(self.x.shape):
            if self.x[i] < 0:
                y_expect[i] = 0
            elif self.x[i] > self.z:
                y_expect[i] = self.z

        gradient_check.assert_allclose(y_expect, y.data)
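The elementwise loop above just spells out a clip. Assuming floating-point inputs, the same expectation can be written with numpy.clip in one line (a sketch, not part of the original test):

import numpy as np
import chainer.functions as F

x = np.random.uniform(-3, 3, (4, 5)).astype(np.float32)
z = 2.0
y = F.clipped_relu(x, z)
# clipped_relu(x, z) is exactly clip(x, 0, z)
assert np.allclose(y.data, np.clip(x, 0.0, z))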
Project: nn_mask | Author: ZitengWang
def _propagate(self, Y, dropout=0.):
        blstm = self.blstm_layer(Y, dropout=dropout)
        relu_1 = F.clipped_relu(self.relu_1(blstm, dropout=dropout))
        relu_2 = F.clipped_relu(self.relu_2(relu_1, dropout=dropout))
        N_mask = F.sigmoid(self.noise_mask_estimate(relu_2))
        X_mask = F.sigmoid(self.speech_mask_estimate(relu_2))
        return N_mask, X_mask
Project: nn_mask | Author: ZitengWang
def _propagate(self, Y, dropout=0.):
        relu_1 = F.clipped_relu(self.relu_1(Y, dropout=dropout))
        relu_2 = F.clipped_relu(self.relu_2(relu_1, dropout=dropout))
        relu_3 = F.clipped_relu(self.relu_3(relu_2, dropout=dropout))
        N_mask = F.sigmoid(self.noise_mask_estimate(relu_3))
        X_mask = F.sigmoid(self.speech_mask_estimate(relu_3))
        return N_mask, X_mask
Project: ddnn | Author: kunglab
def __init__(self, z=20.0):
        self._function = "clipped_relu"
        self.z = z
Project: ddnn | Author: kunglab
def __call__(self, x):
        return F.clipped_relu(x, self.z)
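Pieced together, the two ddnn excerpts above form a small activation wrapper: the constructor stores the clipping threshold and the call forwards to F.clipped_relu. A self-contained sketch of the pattern (the class name here is a hypothetical stand-in, not taken from the repo):

import chainer.functions as F

class ClippedReLU(object):
    # Hypothetical wrapper mirroring the ddnn excerpts above.
    def __init__(self, z=20.0):
        self._function = "clipped_relu"
        self.z = z

    def __call__(self, x):
        return F.clipped_relu(x, self.z)

# usage: act = ClippedReLU(z=1.0); h = act(conv_output)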
Project: adversarial-autoencoder | Author: musyoku
def __call__(self, x):
        return functions.clipped_relu(x, self.z)
Project: nn-gev | Author: fgnt
def _propagate(self, Y, dropout=0.):
        blstm = self.blstm_layer(Y, dropout=dropout)
        relu_1 = F.clipped_relu(self.relu_1(blstm, dropout=dropout))
        relu_2 = F.clipped_relu(self.relu_2(relu_1, dropout=dropout))
        N_mask = F.sigmoid(self.noise_mask_estimate(relu_2))
        X_mask = F.sigmoid(self.speech_mask_estimate(relu_2))
        return N_mask, X_mask
Project: nn-gev | Author: fgnt
def _propagate(self, Y, dropout=0.):
        relu_1 = F.clipped_relu(self.relu_1(Y, dropout=dropout))
        N_mask = F.sigmoid(self.noise_mask_estimate(relu_1))
        X_mask = F.sigmoid(self.speech_mask_estimate(relu_1))
        return N_mask, X_mask
Project: unrolled-gan | Author: musyoku
def __init__(self, z=20.0):
        self._function = "clipped_relu"
        self.z = z
Project: unrolled-gan | Author: musyoku
def __call__(self, x):
        return F.clipped_relu(x, self.z)
Project: SeRanet | Author: corochann
def __call__(self, x, t=None):
        self.clear()
        h1 = F.leaky_relu(self.conv1(x), slope=0.1)
        h1 = F.leaky_relu(self.conv2(h1), slope=0.1)
        h1 = F.leaky_relu(self.conv3(h1), slope=0.1)

        h2 = self.seranet_v1_crbm(x)
        # Fusion
        h12 = F.concat((h1, h2), axis=1)

        lu = F.leaky_relu(self.convlu6(h12), slope=0.1)
        lu = F.leaky_relu(self.convlu7(lu), slope=0.1)
        lu = F.leaky_relu(self.convlu8(lu), slope=0.1)
        ru = F.leaky_relu(self.convru6(h12), slope=0.1)
        ru = F.leaky_relu(self.convru7(ru), slope=0.1)
        ru = F.leaky_relu(self.convru8(ru), slope=0.1)
        ld = F.leaky_relu(self.convld6(h12), slope=0.1)
        ld = F.leaky_relu(self.convld7(ld), slope=0.1)
        ld = F.leaky_relu(self.convld8(ld), slope=0.1)
        rd = F.leaky_relu(self.convrd6(h12), slope=0.1)
        rd = F.leaky_relu(self.convrd7(rd), slope=0.1)
        rd = F.leaky_relu(self.convrd8(rd), slope=0.1)

        # Splice
        h = CF.splice(lu, ru, ld, rd)

        h = F.leaky_relu(self.conv9(h), slope=0.1)
        h = F.leaky_relu(self.conv10(h), slope=0.1)
        h = F.leaky_relu(self.conv11(h), slope=0.1)
        h = F.clipped_relu(self.conv12(h), z=1.0)
        if self.train:
            self.loss = F.mean_squared_error(h, t)
            return self.loss
        else:
            return h
Project: SeRanet | Author: corochann
def __call__(self, x, t=None):
        self.clear()

        h = F.leaky_relu(self.conv1(x), slope=0.1)
        h = F.leaky_relu(self.conv2(h), slope=0.1)
        h = F.leaky_relu(self.conv3(h), slope=0.1)
        h = F.leaky_relu(self.conv4(h), slope=0.1)
        h = F.leaky_relu(self.conv5(h), slope=0.1)
        h = F.leaky_relu(self.conv6(h), slope=0.1)
        h = F.clipped_relu(self.conv7(h), z=1.0)
        if self.train:
            self.loss = F.mean_squared_error(h, t)
            return self.loss
        else:
            return h
Project: SeRanet | Author: corochann
def __call__(self, x, t=None):
        self.clear()

        h = F.leaky_relu(self.conv1(x), slope=0.1)
        h = F.leaky_relu(self.conv2(h), slope=0.1)
        #h = F.leaky_relu(self.conv3(h), slope=0.1)
        #h = F.leaky_relu(self.conv4(h), slope=0.1)
        h = F.clipped_relu(self.conv3(h), z=1.0)
        if self.train:
            self.loss = F.mean_squared_error(h, t)
            return self.loss
        else:
            return h
Project: SeRanet | Author: corochann
def __call__(self, x, t=None):
        self.clear()

        h = F.leaky_relu(self.conv1(x), slope=0.1)
        h = F.leaky_relu(self.conv2(h), slope=0.1)
        h = F.leaky_relu(self.conv3(h), slope=0.1)
        h = F.leaky_relu(self.conv4(h), slope=0.1)
        h = F.leaky_relu(self.conv5(h), slope=0.1)
        h = F.leaky_relu(self.conv6(h), slope=0.1)
        h = F.clipped_relu(self.conv7(h), z=1.0)
        if self.train:
            self.loss = F.mean_squared_error(h, t)
            return self.loss
        else:
            return h
Project: LSGAN | Author: musyoku
def __init__(self, z=20.0):
        self._function = "clipped_relu"
        self.z = z
Project: LSGAN | Author: musyoku
def __call__(self, x):
        return F.clipped_relu(x, self.z)
Project: adgm | Author: musyoku
def __init__(self, z=20.0):
        self._function = "clipped_relu"
        self.z = z
Project: adgm | Author: musyoku
def __call__(self, x):
        return F.clipped_relu(x, self.z)
Project: brain_segmentation | Author: Ryo-Ito
def __call__(self, x, train=False):
        """
        calculate output of VoxResNet given input x

        Parameters
        ----------
        x : (batch_size, in_channels, xlen, ylen, zlen) ndarray
            image to perform semantic segmentation

        Returns
        -------
        proba: (batch_size, n_classes, xlen, ylen, zlen) ndarray
            probability of each voxel belonging each class
            elif train=True, returns list of logits
        """
        h = self.conv1a(x)
        h = F.relu(self.bnorm1a(h, test=not train))
        h = self.conv1b(h)
        c1 = F.clipped_relu(self.c1deconv(h))
        c1 = self.c1conv(c1)

        h = F.relu(self.bnorm1b(h, test=not train))
        h = self.conv1c(h)
        h = self.voxres2(h, train)
        h = self.voxres3(h, train)
        c2 = F.clipped_relu(self.c2deconv(h))
        c2 = self.c2conv(c2)

        h = F.relu(self.bnorm3(h, test=not train))
        h = self.conv4(h)
        h = self.voxres5(h, train)
        h = self.voxres6(h, train)
        c3 = F.clipped_relu(self.c3deconv(h))
        c3 = self.c3conv(c3)

        h = F.relu(self.bnorm6(h, test=not train))
        h = self.conv7(h)
        h = self.voxres8(h, train)
        h = self.voxres9(h, train)
        c4 = F.clipped_relu(self.c4deconv(h))
        c4 = self.c4conv(c4)

        c = c1 + c2 + c3 + c4
        if train:
            return [c1, c2, c3, c4, c]
        else:
            return F.softmax(c)